diff --git a/ci/scripts/build_and_test.sh b/ci/scripts/build_and_test.sh
index e1dde33c1ac31275443047ce1585233b527108cb..7d916c2bb484992d02ae4bf6b98e1547b061e8df 100755
--- a/ci/scripts/build_and_test.sh
+++ b/ci/scripts/build_and_test.sh
@@ -38,6 +38,7 @@ cp -r /hysop "${HYSOP_DIR}"
 
 cd "${HYSOP_DIR}"
 ${SCRIPT_DIR}/version.sh
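+# Remove any stale build tree so meson setup always starts from a clean configuration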
+rm -rf ${HYSOP_BUILD_DIR}
 meson setup      ${HYSOP_BUILD_DIR}
 meson compile -C ${HYSOP_BUILD_DIR}
 meson install -C ${HYSOP_BUILD_DIR}
diff --git a/ci/scripts/run_examples.sh b/ci/scripts/run_examples.sh
index b69abfcadfbd9b47f0366706b98d06fe7bc806db..b34c08797d71079bb2098af8b17bcdf7f146e112 100755
--- a/ci/scripts/run_examples.sh
+++ b/ci/scripts/run_examples.sh
@@ -52,3 +52,24 @@ example_test "bubble/periodic_bubble_levelset_penalization.py" #LLVM bug for DP
 example_test "bubble/periodic_jet_levelset.py"
 example_test "particles_above_salt/particles_above_salt_periodic.py"
 example_test "particles_above_salt/particles_above_salt_symmetrized.py"
+
+
+# Tasks examples (parallel)
+MPIRUN_EXECUTABLE=${MPIRUN_EXECUTABLE:-mpirun}
+MPIRUN_TASKS_OPTION='-np'
+if [ "${MPIRUN_EXECUTABLE}" = "srun" ]; then MPIRUN_TASKS_OPTION='-n'; fi
+if [[ ${MPIRUN_EXECUTABLE} == *"mpirun"* ]]; then MPIRUN_TASKS_OPTION='--oversubscribe '${MPIRUN_TASKS_OPTION}; fi
+MPIRUN_FAIL_EARLY="-mca orte_abort_on_non_zero_status 1"
+MPIRUN_ARGS="${MPIRUN_FAIL_EARLY} ${MPIRUN_TASKS_OPTION} 2"
+COMMON_EXAMPLE_OPTIONS='-VNC -cp float -maxit 2 --autotuner-max-candidates 1 --save-checkpoint --checkpoint-dump-freq 0 --checkpoint-dump-period 0 --checkpoint-dump-last --checkpoint-dump-times'
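+# Each example runs once in serial, then twice under MPI on 2 ranks with different --proc-tasks layouts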
+example_test() {
+     test=$1
+     echo
+     echo "EXAMPLE $1"
+     echo "========$(printf '=%.0s' `seq ${#1}`)"
+     ${PYTHON_EXECUTABLE} -Wd "${EXAMPLE_DIR}/${1}" -d16,16,32 -sd 32,32,64 ${COMMON_EXAMPLE_OPTIONS}
+     ${MPIRUN_EXECUTABLE} ${MPIRUN_ARGS} ${PYTHON_EXECUTABLE} -Wd "${EXAMPLE_DIR}/${1}" -d16,16,32 -sd 32,32,64 --proc-tasks '(111,222),(111,)'  ${COMMON_EXAMPLE_OPTIONS}
+     ${MPIRUN_EXECUTABLE} ${MPIRUN_ARGS} ${PYTHON_EXECUTABLE} -Wd "${EXAMPLE_DIR}/${1}" -d16,16,32 -sd 32,32,64 --proc-tasks '111,222' ${COMMON_EXAMPLE_OPTIONS}
+     echo
+}
+example_test "scalar_advection/turbulent_scalar_advection.py"
diff --git a/ci/utils/run_ci.sh b/ci/utils/run_ci.sh
index 816aa7d057fc98a884e90178570b094990f5f333..3a13a9683253b2a3abaf07b296a5a07f477023c7 100755
--- a/ci/utils/run_ci.sh
+++ b/ci/utils/run_ci.sh
@@ -22,11 +22,10 @@ set -feu -o pipefail
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 DOCKER_IMAGE_TAG=${1:-ci_cpu_intel}
 HYSOP_REGISTRY_URL='gricad-registry.univ-grenoble-alpes.fr'
-DOCKER_IMG="${HYSOP_REGISTRY_URL}/particle_methods/hysop/${DOCKER_IMAGE_TAG}:latest"
 CONTAINER_ID='hysop_build_and_test'
 
-if [[ ${DOCKER_IMAGE_TAG} == *_nvidia ]]; then
-    EXTRA_ARGS="--gpus all"
+if [[ ${DOCKER_IMAGE_TAG} == *_nvidia* ]]; then
+    EXTRA_ARGS="--runtime=nvidia --gpus all"
 else
     EXTRA_ARGS=""
 fi
@@ -37,13 +36,8 @@ function remove_img() {
 }
 trap remove_img INT TERM EXIT KILL
 
-remove_img
-
-docker create --cap-add=SYS_PTRACE ${EXTRA_ARGS} -v "${SCRIPT_DIR}/../..:/hysop:ro" --name="${CONTAINER_ID}" -it "${DOCKER_IMG}"
-
+docker create --cap-add=SYS_PTRACE ${EXTRA_ARGS} --name="${CONTAINER_ID}" -it -v "${SCRIPT_DIR}/../..:/hysop:ro" "${HYSOP_REGISTRY_URL}/particle_methods/hysop/${DOCKER_IMAGE_TAG}:latest"
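+# Sources are bind-mounted read-only into /hysop; builds and installs happen inside the container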
 docker start "${CONTAINER_ID}"
-
-docker exec "${CONTAINER_ID}" /hysop/ci/scripts/build_and_test.sh
-
-# on test success, upload hysop cache to the docker images
-docker commit "${CONTAINER_ID}" "${DOCKER_IMG}"
+# bash -i is needed so the shell init activates micromamba; the interactivity itself is not used
+docker exec -it "${CONTAINER_ID}" bash -i -c "MESON_TESTTHREADS=1 /hysop/ci/scripts/build_and_test.sh"
+docker exec -it "${CONTAINER_ID}" bash -i -c "MESON_TESTTHREADS=1 EXAMPLE_DIR=/hysop/hysop_examples/examples /hysop/ci/scripts/run_examples.sh"
diff --git a/docs/config/gen_content.sh.in b/docs/config/gen_content.sh.in
index 345d20bfe540fca3248710fd8578cd15ef3b4af4..2684cecb1d8a09b9d95ad34ef67f237bd7b5732e 100755
--- a/docs/config/gen_content.sh.in
+++ b/docs/config/gen_content.sh.in
@@ -7,11 +7,6 @@ doc_type=${1:-"html"}
 mkdir -p @SPHINX_OUTPUT@/tags
 mkdir -p @SPHINX_OUTPUT@/html
 
-# Doxygen
-mkdir -p @DOXYGEN_OUTPUT@
-@DOXYGEN_BIN@ @DOXYGEN_CONFIG@ > @DOC_BUILD_DIR@/doxygen.log
-
-## SPHINX - DOC
 ## Documentation files
 mkdir -p @SPHINX_TMP@
 for f in @SPNHINX_INPUT_FILES@
@@ -22,13 +17,18 @@ for dir in @SPNHINX_INPUT_SUBDIRS@
 do
     cp -r @SPHINX_INPUT_HYSOPDOC@/${dir} @SPHINX_TMP@/.
 done
+
+## SPHINX - DOC
 ## Complete the license.rst with actual LICENSE file content
 cat @SPHINX_INPUT_HYSOPDOC@/license.rst @HYSOP_SOURCE_DIR@/LICENSE > @SPHINX_TMP@/license.rst
+## Doxygen
+mkdir -p @DOXYGEN_OUTPUT@
+@DOXYGEN_BIN@ @DOXYGEN_CONFIG@ > @DOC_BUILD_DIR@/doxygen.log
 ## API
 rm -rf @SPHINX_TMP@/apidoc
 mkdir -p @SPHINX_TMP@/apidoc
 @SPHINX_API_BIN@  -e -P -o @SPHINX_TMP@/apidoc @SPHINX_INPUT_HYSOP@
 ## Fix the generated files: adding the path in Sphinx's conf.py does not seem enough to find these hysop modules
-find @SPHINX_TMP@/apidoc -name '*.rst' -exec sed -i'' -E 's/(.. automodule:: )/\1hysop./g' '{}' \;
+# find @SPHINX_TMP@/apidoc -name '*.rst' -exec sed -i'' -E 's/(.. automodule:: )/\1hysop./g' '{}' \;
 ## Doc pages build
 @SPHINX_BIN@ -T -w @DOC_BUILD_DIR@/sphinx.log -b ${doc_type} -d @DOC_BUILD_DIR@/doctrees -c @SPHINX_CONFIG@  @SPHINX_TMP@  @SPHINX_OUTPUT@/${doc_type} > /dev/null 2>&1
diff --git a/docs/config/hysop.doxyfile.in b/docs/config/hysop.doxyfile.in
index f01913d6782065a2fcc40ed08889d78491c22c3d..88af7caf76526674b0e3b012f2448b05d95d0301 100644
--- a/docs/config/hysop.doxyfile.in
+++ b/docs/config/hysop.doxyfile.in
@@ -58,7 +58,7 @@ PROJECT_NAME           = @PROJECT_NAME@
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER         = @PACKAGE_VERSION@
+PROJECT_NUMBER         = @HYSOP_VERSION@
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
diff --git a/docs/config/meson.build b/docs/config/meson.build
index 00458114184e8a5afdb909060882070046901dd8..1a1f36c36da56686874059a5ae3e52b8b5254451 100644
--- a/docs/config/meson.build
+++ b/docs/config/meson.build
@@ -25,7 +25,7 @@ conf_doc_data = configuration_data({
   'GENERATE_XML': 'YES',
   'BIBTEXFILE': doc_bibtexfile,
   'HYSOP_COPYRIGHT': copyright.stdout().strip().replace('${years}',years.stdout().strip()),
-  'HYSOP_BUILD_PYTHONPATH':meson_build_dir,
+  'HYSOP_PYTHONPATH': hysop_pythonpath,
   'HYSOP_SOURCE_DIR' : project_dir,
 })
 
diff --git a/docs/meson.build b/docs/meson.build
index 18a2dc1c1472f0b5927845867fc7032bc87b753e..7f701bbaf6d86cc49936c361501e201af9795dcd 100644
--- a/docs/meson.build
+++ b/docs/meson.build
@@ -21,18 +21,21 @@ subdir('config')
 docs_gen_html_content = custom_target('docs',
                                       output: 'docs',
                                       capture: false,
-                                      command: [bash, doc_build_dir + '/config/gen_content.sh', 'html'])
+                                      command: [bash, doc_build_dir + '/config/gen_content.sh', 'html'],
+                                      env: docs_env)
 
 
 docs_gen_latex_content = custom_target('docs_gen_latex_content',
-                                      output: 'docs_gen_latex_content.done',
-                                      capture: false,
-                                      command: [bash, doc_build_dir + '/config/gen_content.sh', 'latex'],
-                                      depends: docs_gen_html_content)
+                                       output: 'docs_gen_latex_content.done',
+                                       capture: false,
+                                       command: [bash, doc_build_dir + '/config/gen_content.sh', 'latex'],
+                                       depends: docs_gen_html_content,
+                                       env: docs_env)
 
+docs_env.set('HYSOP_VERBOSE', 'false')
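+# The doctest target below reuses docs_env, now with HYSOP_VERBOSE disabled to keep its output quiet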
 docs_doctest = custom_target('doctest',
                              output: 'docs_doctest.done',
                              capture: false,
-                             env: ['HYSOP_VERBOSE=false', ],
+                             env: docs_env,
                              command: [sphinx, '-M', 'doctest', '-d', doc_build_dir + '/doctrees', '-c', doc_build_dir + '/config', doc_build_dir + '/sphinx', doc_build_dir + '/build/html'],
                              depends: docs_gen_html_content)
diff --git a/hysop/backend/device/opencl/opencl_symbolic.py b/hysop/backend/device/opencl/opencl_symbolic.py
index 6962050fb818a990431ccc4550dd8909c52d0910..3366de13a80dce590d382391d3ec947f46a70475 100644
--- a/hysop/backend/device/opencl/opencl_symbolic.py
+++ b/hysop/backend/device/opencl/opencl_symbolic.py
@@ -54,6 +54,7 @@ from hysop.backend.device.opencl.autotunable_kernels.custom_symbolic import (
     OpenClAutotunableCustomSymbolicKernel,
 )
 from hysop.tools.sympy_utils import subscript, subscripts
+from hysop.tools.parameters import MPIParams
 
 
 class OpenClSymbolic(OpenClOperator):
@@ -217,8 +218,8 @@ class OpenClSymbolic(OpenClOperator):
         check_instance(
             output_fields, dict, keys=ScalarField, values=CartesianTopologyDescriptors
         )
-        check_instance(input_params, dict, keys=str, values=Parameter)
-        check_instance(output_params, dict, keys=str, values=Parameter)
+        check_instance(input_params, dict, keys=Parameter, values=MPIParams)
+        check_instance(output_params, dict, keys=Parameter, values=MPIParams)
         check_instance(input_tensor_fields, tuple, values=Field)
         check_instance(output_tensor_fields, tuple, values=Field)
 
@@ -261,8 +262,18 @@ class OpenClSymbolic(OpenClOperator):
                     raise RuntimeError(msg)
             _cmp("input_fields", input_fields, expr_info.input_fields, exprs)
             _cmp("output_fields", output_fields, expr_info.output_fields, exprs)
-            _cmp("input_params", input_params, expr_info.input_params, exprs)
-            _cmp("output_params", output_params, expr_info.output_params, exprs)
+            _cmp(
+                "input_params",
+                dict((_.name, _) for _ in input_params.keys()),
+                expr_info.input_params,
+                exprs,
+            )
+            _cmp(
+                "output_params",
+                dict((_.name, _) for _ in output_params.keys()),
+                expr_info.output_params,
+                exprs,
+            )
             assert 0 <= self.cr <= expr_info.max_granularity, self.cr
             expr_info.compute_granularity = self.cr
             expr_info.time_integrator = self.time_integrator
diff --git a/hysop/backend/host/fortran/operator/diffusion.py b/hysop/backend/host/fortran/operator/diffusion.py
index 6ce82e9cc336251bb4f2625ded6dc057d84db10b..34c61208ca04343ea986c1d1d895aee8c9aa027b 100644
--- a/hysop/backend/host/fortran/operator/diffusion.py
+++ b/hysop/backend/host/fortran/operator/diffusion.py
@@ -80,9 +80,9 @@ class DiffusionFFTW(FortranFFTWOperator):
 
         input_fields = {Fin: variables[Fin]}
         output_fields = {Fout: variables[Fout]}
-        input_params = {dt.name: dt}
+        input_params = {dt}
         if isinstance(nu, ScalarParameter):
-            input_params[nu.name] = nu
+            input_params.update({nu})
         else:
             self._real_nu = nu
             nu = lambda: self._real_nu
diff --git a/hysop/backend/host/fortran/operator/scales_advection.py b/hysop/backend/host/fortran/operator/scales_advection.py
index 0eb9d12e4ddd90ab647c4347bd5472c03b6c17ed..ef5cd77bd86ed1f56c158879ccf10b6156037692 100644
--- a/hysop/backend/host/fortran/operator/scales_advection.py
+++ b/hysop/backend/host/fortran/operator/scales_advection.py
@@ -176,8 +176,8 @@ class ScalesAdvection(FortranOperator):
 
         input_fields = {velocity: variables[velocity]}
         output_fields = {}
-        input_params = {dt.name: dt}
-        output_params = {}
+        input_params = {dt}
+        output_params = set()
 
         is_inplace = True
         for ifield, ofield in zip(advected_fields_in, advected_fields_out):
diff --git a/hysop/backend/host/python/operator/analytic.py b/hysop/backend/host/python/operator/analytic.py
index 4851351d28ecf0381e3591284a9e4acc8cafa5d0..93edc950f3767e61c272a582f51ad31ad8fb1417 100644
--- a/hysop/backend/host/python/operator/analytic.py
+++ b/hysop/backend/host/python/operator/analytic.py
@@ -75,7 +75,7 @@ class PythonAnalyticField(HostOperator):
 
         input_fields = {}
         output_fields = {field: self.get_topo_descriptor(variables, field)}
-        input_params = {}
+        input_params = set()
 
         extra_kwds = {}
         map_fields = {}
@@ -84,7 +84,7 @@ class PythonAnalyticField(HostOperator):
                 input_fields[v] = self.get_topo_descriptor(variables, v)
                 map_fields[v] = k
             elif isinstance(v, Parameter):
-                input_params[k] = v
+                input_params.update({v})
                 extra_kwds[k] = v
             else:
                 extra_kwds[k] = v
diff --git a/hysop/backend/host/python/operator/flowrate_correction.py b/hysop/backend/host/python/operator/flowrate_correction.py
index fe3d6ebf4b653d9a1bcd1ac349dab47633cc40e1..beb3d6e13aeddf455b4e19a7b05184526063cf7a 100644
--- a/hysop/backend/host/python/operator/flowrate_correction.py
+++ b/hysop/backend/host/python/operator/flowrate_correction.py
@@ -76,7 +76,7 @@ class PythonFlowRateCorrection(HostOperator):
 
         input_fields = {velocity: variables[velocity], vorticity: variables[vorticity]}
         output_fields = {velocity: variables[velocity]}
-        input_params = {flowrate.name: flowrate}
+        input_params = {flowrate}
 
         self.velocity = velocity
         self.vorticity = vorticity
diff --git a/hysop/backend/host/python/operator/penalization.py b/hysop/backend/host/python/operator/penalization.py
index d74123582376c7cada1e6b902930cb986c0f023b..364e9c3c758c00136a93071462668b94471314cf 100644
--- a/hysop/backend/host/python/operator/penalization.py
+++ b/hysop/backend/host/python/operator/penalization.py
@@ -191,7 +191,7 @@ class PythonPenalizeVorticity(HostOperator, CommonPenalization):
 
         input_fields = {velocity: variables[velocity], vorticity: variables[vorticity]}
         output_fields = {vorticity: variables[vorticity]}
-        input_params = {dt.name: dt}
+        input_params = {dt}
         for o in obstacles.values() if isinstance(obstacles, dict) else obstacles:
             input_fields[o] = variables[o]
         if isinstance(ubar, Field):
@@ -407,7 +407,7 @@ class PythonPenalizeVelocity(HostOperator, CommonPenalization):
             velocity: variables[velocity],
         }
         output_fields = {velocity: variables[velocity]}
-        input_params = {dt.name: dt}
+        input_params = {dt}
         for o in obstacles.values() if isinstance(obstacles, dict) else obstacles:
             input_fields[o] = variables[o]
         if isinstance(ubar, Field):
diff --git a/hysop/backend/host/python/operator/vorticity_absorption.py b/hysop/backend/host/python/operator/vorticity_absorption.py
index 545553f14480957240907f8bd123ea6b5c25208e..cce9ec180eff7792fb0d2034626de51ec0fafbdf 100644
--- a/hysop/backend/host/python/operator/vorticity_absorption.py
+++ b/hysop/backend/host/python/operator/vorticity_absorption.py
@@ -102,7 +102,7 @@ class PythonVorticityAbsorption(HostOperator):
 
         input_fields = {velocity: variables[velocity], vorticity: variables[vorticity]}
         output_fields = {vorticity: variables[vorticity]}
-        input_params = {flowrate.name: flowrate}
+        input_params = {flowrate}
 
         self.velocity = velocity
         self.vorticity = vorticity
diff --git a/hysop/core/graph/computational_graph.py b/hysop/core/graph/computational_graph.py
index 0e4a8eb438839b0cf0bea193b0f510bf34459fcb..0ddc0d880d1cd08ff488d13ea4a9e8e3f7e16a55 100644
--- a/hysop/core/graph/computational_graph.py
+++ b/hysop/core/graph/computational_graph.py
@@ -376,10 +376,10 @@ class ComputationalGraph(ComputationalGraphNode, metaclass=ABCMeta):
                     )
                 )
                 pinputs = ",".join(
-                    sorted(p.pretty_name for p in op.input_params.values())
+                    sorted(p.pretty_name for p in op.input_params.keys())
                 )
                 poutputs = ",".join(
-                    sorted(p.pretty_name for p in op.output_params.values())
+                    sorted(p.pretty_name for p in op.output_params.keys())
                 )
                 infields = f"[{finputs}]" if finputs else ""
                 outfields = f"[{foutputs}]" if foutputs else ""
@@ -411,10 +411,10 @@ class ComputationalGraph(ComputationalGraphNode, metaclass=ABCMeta):
             operators = domains[None]
             for op in sorted(operators, key=lambda x: x.pretty_name):
                 pinputs = ",".join(
-                    sorted(p.pretty_name for p in op.input_params.values())
+                    sorted(p.pretty_name for p in op.input_params.keys())
                 )
                 poutputs = ",".join(
-                    sorted(p.pretty_name for p in op.output_params.values())
+                    sorted(p.pretty_name for p in op.output_params.keys())
                 )
                 inparams = f"[{pinputs}]" if pinputs else ""
                 outparams = f"[{poutputs}]" if poutputs else ""
@@ -617,10 +617,10 @@ class ComputationalGraph(ComputationalGraphNode, metaclass=ABCMeta):
                 foutputs = ",".join(sorted(foutputs))
 
                 pinputs = ",".join(
-                    sorted(p.pretty_name for p in node.input_params.values())
+                    sorted(p.pretty_name for p in node.input_params.keys())
                 )
                 poutputs = ",".join(
-                    sorted(p.pretty_name for p in node.output_params.values())
+                    sorted(p.pretty_name for p in node.output_params.keys())
                 )
 
                 infields = f"[{finputs}]" if finputs else ""
@@ -833,7 +833,6 @@ class ComputationalGraph(ComputationalGraphNode, metaclass=ABCMeta):
     @debug
     @not_initialized
     def push_nodes(self, *args):
-        from hysop.operators import InterTaskParamComm
         from hysop.problem import Problem
 
         nodes = ()
@@ -886,9 +885,7 @@ class ComputationalGraph(ComputationalGraphNode, metaclass=ABCMeta):
                 self._last_pushed_node_mpi_params
                 and self._last_pushed_node_mpi_params.task_id != mpi_params.task_id
             ):
-                if not isinstance(node, InterTaskParamComm) and not (
-                    isinstance(node, Problem) and node.search_intertasks_ops
-                ):
+                if not (isinstance(node, Problem) and node.search_intertasks_ops):
                     self.nodes.append(_hidden_node(node, mpi_params))
             self._last_pushed_node_mpi_params = _get_mpi_params(node)
             if mpi_params and not mpi_params.on_task:
diff --git a/hysop/core/graph/computational_node.py b/hysop/core/graph/computational_node.py
index 80beabf38aa69b332f77aac72255e322df031c40..e142dd0d887ad0f834680279a398c4b92972a0dc 100644
--- a/hysop/core/graph/computational_node.py
+++ b/hysop/core/graph/computational_node.py
@@ -29,6 +29,7 @@ from abc import ABCMeta, abstractmethod
 from hysop import dprint
 from hysop.tools.htypes import InstanceOf, to_set, check_instance, first_not_None
 from hysop.tools.io_utils import IOParams
+from hysop.tools.parameters import MPIParams
 from hysop.parameters.parameter import Parameter
 from hysop.fields.continuous_field import Field, ScalarField, TensorField
 from hysop.core.graph.node_requirements import NodeRequirements
@@ -134,9 +135,9 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
         output_fields: dict, optional
             output fields as a dictionary (see Notes).
         input_params: array like of hysop.parameters.Parameter or dict, optional (see Notes)
-            input parameters as a list or a dictionnary.
+            input parameters as a set or a dictionary.
         output_params: array like of hysop.parameters.Parameter or dict, optional (see Notes)
-            output parameters as a list or a dictionnary.
+            output parameters as a set or a dictionary.
         input_tensor_fields: tuple, optional
             input tensor fields as a tuple.
             If given, input_fields is assumed to contain only ScalarFields.
@@ -191,8 +192,7 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
 
         VectorFields and TensorFields are expanded to ScalarFields.
 
-        For input and output parameters, the keys of the dicts can be arbitrary names that
-        can be used to retrieve the parameters
+        For input and output parameters, when a dict is given, the key is the actual Parameter and the value must be an MPIParams instance or None.
 
         Giving the following keywords as inputs (in **kwds) will throw a ValueError:
             input_vars, output_vars, variables, iwork, rwork, work, backend
@@ -295,10 +295,10 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
             raise ValueError(msg.format(output_fields.__class__))
         if not isinstance(input_params, dict):
             input_params = to_set(input_params)
-            input_params = {p.name: p for p in input_params}
+            input_params = {p: None for p in input_params}
         if not isinstance(output_params, dict):
             output_params = to_set(output_params)
-            output_params = {p.name: p for p in output_params}
+            output_params = {p: None for p in output_params}
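+            # None is a placeholder meaning "MPIParams not known yet"; it is consolidated during base initialization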
 
         self.name = name
         self.pretty_name = pretty_name
@@ -436,8 +436,12 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
         assert not self._base_initialized
         check_instance(input_fields, dict, keys=ScalarField)
         check_instance(output_fields, dict, keys=ScalarField)
-        check_instance(input_params, dict, keys=str, values=Parameter)
-        check_instance(output_params, dict, keys=str, values=Parameter)
+        check_instance(
+            input_params, dict, keys=Parameter, values=(MPIParams, type(None))
+        )
+        check_instance(
+            output_params, dict, keys=Parameter, values=(MPIParams, type(None))
+        )
         check_instance(input_tensor_fields, tuple, values=TensorField)
         check_instance(output_tensor_fields, tuple, values=TensorField)
         check_instance(all_input_fields, tuple, values=Field)
@@ -458,8 +462,8 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
         otfields = set(self.output_tensor_fields)
         tfields = tuple(itfields.union(otfields))
 
-        iparams = set(self.input_params.values())
-        oparams = set(self.output_params.values())
+        iparams = set(self.input_params.keys())
+        oparams = set(self.output_params.keys())
         parameters = tuple(iparams.union(oparams))
 
         if ("mpi_params" in self.__kwds) and (
@@ -491,6 +495,16 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
             parameters=parameters,
             **self.__kwds,
         )
+        # Consolidate unknown mpi_params for parameters.
+        for p in iparams:
+            if self.input_params[p] is None:
+                self.input_params[p] = self.mpi_params
+        for p in oparams:
+            if self.output_params[p] is None:
+                self.output_params[p] = self.mpi_params
+        # After consolidation, None values are no longer allowed.
+        check_instance(self.input_params, dict, keys=Parameter, values=MPIParams)
+        check_instance(self.output_params, dict, keys=Parameter, values=MPIParams)
         self._base_initialized = True
         self.all_input_fields = all_input_fields
         self.all_output_fields = all_output_fields
@@ -1313,16 +1327,14 @@ class ComputationalGraphNode(OperatorBase, metaclass=ABCMeta):
         ss += "\n  INPUT PARAMS:{}"
         if self.input_params:
             ss = ss.format(
-                sep
-                + sep.join(f.short_description() for f in self.input_params.values())
+                sep + sep.join(f.short_description() for f in self.input_params.keys())
             )
         else:
             ss = ss.format(" None")
         ss += "\n  OUTPUT PARAMS:{}"
         if self.output_params:
             ss = ss.format(
-                sep
-                + sep.join(f.short_description() for f in self.output_params.values())
+                sep + sep.join(f.short_description() for f in self.output_params.keys())
             )
         else:
             ss = ss.format(" None")
diff --git a/hysop/core/graph/graph.py b/hysop/core/graph/graph.py
index aafaec415d3e1e6a0ca48978d7bf65eca508ca9a..57f5d72c866edd343661bb402de96cd80bec5a36 100644
--- a/hysop/core/graph/graph.py
+++ b/hysop/core/graph/graph.py
@@ -236,7 +236,7 @@ class VertexAttributes:
             f"{prefix}Rank:{suffix}{self.op_ordering}\n\n" if self.op_ordering else "",
             (
                 "{p}Pin:{s}{}\n".format(
-                    sep.join(ipinfo(param) for param in iparams.values()),
+                    sep.join(ipinfo(param) for param in iparams.keys()),
                     p=prefix,
                     s=suffix + "&nbsp&nbsp",
                 )
@@ -254,7 +254,7 @@ class VertexAttributes:
             ),
             (
                 "{p}Pout:{s}{}\n".format(
-                    sep.join([opinfo(param) for param in oparams.values()]),
+                    sep.join([opinfo(param) for param in oparams.keys()]),
                     p=prefix,
                     s=suffix,
                 )
@@ -466,7 +466,7 @@ def op_apply(f):
             _file = inspect.getsourcefile(f)
             _, _line = inspect.getsourcelines(f)
             description = f"{_file}:{_line}"
-            for param in sorted(op.input_params.values(), key=lambda x: x.name):
+            for param in sorted(op.input_params.keys(), key=lambda x: x.name):
                 tag = f"pre_{op.name}_{param.name}"
                 kwds["debug_dumper"](
                     it, t, tag, (param._value,), description=description
@@ -486,7 +486,7 @@ def op_apply(f):
                     description=description,
                 )
             ret = f(*args, **kwds)
-            for param in sorted(op.output_params.values(), key=lambda x: x.name):
+            for param in sorted(op.output_params.keys(), key=lambda x: x.name):
                 tag = f"post_{op.name}_{param.name}"
                 kwds["debug_dumper"](
                     it, t, tag, (param._value,), description=description
diff --git a/hysop/core/graph/graph_builder.py b/hysop/core/graph/graph_builder.py
index accc7b19f4c4d15c39f102600a84b73baa8d3822..2ed1e4eb9549eb70bc720a0d4715af04f77d5c54 100644
--- a/hysop/core/graph/graph_builder.py
+++ b/hysop/core/graph/graph_builder.py
@@ -20,6 +20,7 @@
 import numpy as np
 
 from hysop import vprint, dprint, Problem
+from hysop.fields.continuous_field import ScalarField
 from hysop.tools.htypes import check_instance, first_not_None
 from hysop.tools.io_utils import IOParams
 
@@ -50,9 +51,9 @@ from hysop.fields.field_requirements import (
 from hysop.operator.redistribute import (
     Redistribute,
     RedistributeInter,
+    RedistributeInterParam,
     RedistributeNotImplementedError,
 )
-from hysop.operator.inter_task_param_comm import PythonInterTaskParamComm
 from hysop.operator.transpose import Transpose, TranspositionNotImplementedError
 from hysop.operator.memory_reordering import (
     MemoryReordering,
@@ -226,19 +227,13 @@ class GraphBuilder:
                 field_requirements = op._field_requirements
 
             if isinstance(op, RedistributeInter) or isinstance(
-                op, PythonInterTaskParamComm
+                op, RedistributeInterParam
             ):
                 self._intertasks_exchanged = self._intertasks_exchanged.union(
-                    {_.name for _ in op.output_fields.keys()}
+                    {_.name for _ in list(op.output_fields) + list(op.output_params)}
                 )
                 self._intertasks_exchanged = self._intertasks_exchanged.union(
-                    {_.name for _ in op.input_fields.keys()}
-                )
-                self._intertasks_exchanged = self._intertasks_exchanged.union(
-                    set(op.output_params.keys())
-                )
-                self._intertasks_exchanged = self._intertasks_exchanged.union(
-                    set(op.input_params.keys())
+                    {_.name for _ in list(op.input_fields) + list(op.input_params)}
                 )
 
             if not isinstance(op, Problem) and not isinstance(op, RedistributeInter):
@@ -303,19 +298,19 @@ class GraphBuilder:
             # iterate over subgraph operator input parameters
             if iparams:
                 gprint("   >Input parameters")
-                for iparam in sorted(iparams.values(), key=lambda x: x.name):
+                for iparam in sorted(iparams.keys(), key=lambda x: x.name):
                     gprint(f"     *{iparam.short_description()}")
                     parameter_handler.handle_input_parameter(iparam, opnode)
                     if iparam.name not in output_params:
-                        input_params[iparam.name] = iparam
+                        input_params[iparam] = iparams[iparam]
 
             # iterate over subgraph operator output parameters
             if oparams:
                 gprint("   >Output parameters")
-                for oparam in sorted(oparams.values(), key=lambda x: x.name):
+                for oparam in sorted(oparams.keys(), key=lambda x: x.name):
                     gprint(f"     *{oparam.short_description()}")
                     parameter_handler.handle_output_parameter(oparam, opnode)
-                    output_params[oparam.name] = oparam
+                    output_params[oparam] = oparams[oparam]
 
             # iterate over subgraph operator input fields
             input_states = {}
@@ -377,7 +372,7 @@ class GraphBuilder:
                             (
                                 " on an unknown topology"
                                 if (otopo is None)
-                                else f".{otopo.pretty_tag}  t{otopo.task_id}"
+                                else f".{otopo.pretty_tag}"
                             ),
                         )
                     )
@@ -446,10 +441,10 @@ class GraphBuilder:
 
             # Find redistribute candidates
             available_names = {
-                _ if not hasattr(_, "name") else _.name for _ in available_elems.keys()
+                _.name for _ in available_elems.keys()
             } - self._intertasks_exchanged
             needed_names = {
-                _ if not hasattr(_, "name") else _.name for _ in needed_elems.keys()
+                _.name for _ in needed_elems.keys()
             } - self._intertasks_exchanged
             mgs = "  >[IT] Current task ({}) {} parameters and fields : {}"
             gprint(
@@ -477,17 +472,22 @@ class GraphBuilder:
                                     continue
                                 else:
                                     ot_needs.append(_n)
-                            ot_provide = can_provide = [
-                                _ for _ in ot_needs if _ in available_names
-                            ]
+                            can_provide = [_ for _ in ot_needs if _ in available_names]
+                            to_remove = []
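+                            # Keep only candidates this task owns and the other task (ot) actually needs; drop the rest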
                             for prov in can_provide:
-                                available_names[prov] = needed_elems[
-                                    _name_to_key(prov, needed_elems)
-                                ].task_id
-                            for _op in ot_provide:
-                                needed_names[_op] = available_elems[
-                                    _name_to_key(_op, available_elems)
-                                ].task_id
+                                ae = available_elems[
+                                    _name_to_key(prov, available_elems)
+                                ]
+                                ne = needed_elems[_name_to_key(prov, needed_elems)]
+                                if ae.task_id != ot and ne.task_id == ot:
+                                    available_names[prov] = ne.task_id
+                                    needed_names[prov] = ae.task_id
+                                else:
+                                    to_remove.append(prov)
+                            for rm in to_remove:
+                                can_provide.remove(rm)
+                                available_names[rm] = None
+                                needed_names[rm] = None
                         else:
                             comm.isend(
                                 list(needed_names.keys()),
@@ -500,12 +500,10 @@ class GraphBuilder:
                             can_provide = [_ for _ in ot_needs if _ in available_names]
                             for prov in can_provide:
                                 available_names[prov] = ot
-                                assert (
-                                    ot
-                                    != available_elems[
-                                        _name_to_key(prov, available_elems)
-                                    ].task_id
-                                )
+                                ae = available_elems[
+                                    _name_to_key(prov, available_elems)
+                                ]
+                                assert ot != ae.task_id
                             comm.isend(
                                 can_provide,
                                 dest=domain.task_root_in_parent(ot),
@@ -516,12 +514,8 @@ class GraphBuilder:
                             )
                             for _op in ot_provide:
                                 needed_names[_op] = ot
-                                assert (
-                                    ot
-                                    != needed_elems[
-                                        _name_to_key(_op, needed_elems)
-                                    ].task_id
-                                )
+                                ne = needed_elems[_name_to_key(_op, needed_elems)]
+                                assert ot != ne.task_id
                         if len(ot_needs) > 0:
                             msg += "\n   *Other task {} needs init for {}, we provide {}".format(
                                 ot,
@@ -555,54 +549,85 @@ class GraphBuilder:
                 f"  >[IT] Inter-tasks will send:to {available_names} and recieve:from {needed_names}"
             )
             # Get back the actual field or parameter
+            names_to_obj = {}
+            for p in available_names.keys():
+                names_to_obj[p] = _name_to_key(p, available_elems)
+            for p in needed_names.keys():
+                names_to_obj[p] = _name_to_key(p, needed_elems)
+            # group parameters with same other task
+            allp = []
+            tasks_to_name = {}
             for p in sorted(
                 set(available_names.keys()).union(set(needed_names.keys()))
             ):
+                t = (
+                    available_names[p]
+                    if p in available_names.keys()
+                    else needed_names[p]
+                )
+                if isinstance(names_to_obj[p], ScalarField):
+                    allp.append(
+                        [
+                            p,
+                        ]
+                    )
+                else:
+                    if t in tasks_to_name:
+                        tasks_to_name[t].append(p)
+                    else:
+                        tasks_to_name[t] = [
+                            p,
+                        ]
+            for params in tasks_to_name.values():
+                allp.append(params)
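+            # Scalar fields are redistributed one by one; parameters bound for the same task are batched into one operator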
+            for p in sorted(allp):
                 kwargs = {}
                 s_topo, r_topo, comm_dir = (None,) * 3
-                ae, ne = tuple(
-                    _name_to_key(p, _) for _ in (available_elems, needed_elems)
-                )
-                if not ae is None and p in available_names:
-                    var = ae
-                    t = available_names[p]
-                    topo = available_elems[ae]
+                var = tuple(names_to_obj[_] for _ in p)
+                if p[0] in available_names:
+                    t = available_names[p[0]]
+                    topo = available_elems[var[0]]
                     comm_dir = "src"
                     s_topo = topo
-                if not ne is None and p in needed_names:
-                    var = ne
-                    t = needed_names[p]
-                    topo = needed_elems[ne]
+                if p[0] in needed_names:
+                    t = needed_names[p[0]]
+                    topo = needed_elems[var[0]]
                     comm_dir = "dest"
                     r_topo = topo
                 if not (s_topo is None or r_topo is None):
                     comm_dir = "src&dest"
                     t = None
                 assert not comm_dir is None
+                opname = "RI{}_{}{}{}_{}".format(
+                    comm_dir,
+                    "" if s_topo is None else s_topo.task_id,
+                    "to" if not s_topo is None and not r_topo is None else "",
+                    "" if r_topo is None else r_topo.task_id,
+                    ",".join(_.name for _ in var),
+                )
                 # Finalize init call
                 kwargs.update(
                     {
-                        "variable": var,
-                        "mpi_params": topo.mpi_params,
-                        "name": "RI{}_{}{}{}_{}".format(
-                            comm_dir,
-                            "" if s_topo is None else s_topo.id,
-                            "to" if not s_topo is None and not r_topo is None else "",
-                            "" if r_topo is None else r_topo.id,
-                            var.name,
-                        ),
-                        "pretty_name": "RI{}_{}{}{}_{}".format(
-                            comm_dir,
-                            "" if s_topo is None else subscript(s_topo.id),
-                            "\u2192",
-                            "" if r_topo is None else subscript(r_topo.id),
-                            var.pretty_name,
-                        ),
+                        "name": opname,
                         "source_topo": s_topo,
                         "target_topo": r_topo,
                         "other_task_id": t,
                     }
                 )
+                if isinstance(var[0], ScalarField):
+                    kwargs.update(
+                        {
+                            "variable": var[0],
+                            "mpi_params": topo.mpi_params,
+                        }
+                    )
+                else:
+                    kwargs.update(
+                        {
+                            "parameter": var,
+                            "domain": domain,
+                        }
+                    )
                 yield kwargs
 
         # iterate over ComputationalNodes
@@ -645,18 +670,26 @@ class GraphBuilder:
                     for it_redistribute_kwargs in __find_elements_to_redistribute(
                         available_elems, needed_elems
                     ):
-                        assert RedistributeInter.can_redistribute(
-                            *tuple(
-                                it_redistribute_kwargs[_]
-                                for _ in ("source_topo", "target_topo", "other_task_id")
-                            )
-                        ), str(it_redistribute_kwargs)
+                        if "variable" in it_redistribute_kwargs.keys():
+                            assert RedistributeInter.can_redistribute(
+                                *tuple(
+                                    it_redistribute_kwargs[_]
+                                    for _ in (
+                                        "source_topo",
+                                        "target_topo",
+                                        "other_task_id",
+                                    )
+                                )
+                            ), str(it_redistribute_kwargs)
                         if op.fake_init:
                             op.__init__(**it_redistribute_kwargs)
                             # Recompute fields requirements since no fields were given in first fake operator creation
                             first_op, first_opnode = op, opnode
                         else:
-                            op = RedistributeInter(**it_redistribute_kwargs)
+                            if "variable" in it_redistribute_kwargs.keys():
+                                op = RedistributeInter(**it_redistribute_kwargs)
+                            else:
+                                op = RedistributeInterParam(**it_redistribute_kwargs)
                             target_node.nodes.insert(
                                 target_node.nodes.index(first_op), op
                             )
@@ -674,12 +707,14 @@ class GraphBuilder:
                             )
                             opvertex = node_vertices[0]
                             opnode = new_vertex(graph, op)
-                            cstate = self.topology_states.setdefault(
-                                op.variable, self.new_topology_state(op.variable)
-                            )
+                            if isinstance(op, RedistributeInter):
+                                cstate = self.topology_states.setdefault(
+                                    op.variable, self.new_topology_state(op.variable)
+                                )
                             node = op
-                        op.initialize(topgraph_method=self.target_node.method)
-                        op.get_and_set_field_requirements()
+                        if isinstance(op, RedistributeInter):
+                            op.initialize(topgraph_method=self.target_node.method)
+                            op.get_and_set_field_requirements()
                         __handle_node(
                             node_id,
                             node,
@@ -692,7 +727,7 @@ class GraphBuilder:
                             opnode,
                         )
                         node_id += 1
-                    if op.fake_init:
+                    if isinstance(op, RedistributeInter) and op.fake_init:
                         # Delete node because nothing has to be exchanged
                         target_node.nodes.remove(op)
                         graph.remove_node(opnode)
@@ -727,7 +762,7 @@ class GraphBuilder:
                             f": {ireqs}" if GRAPH_BUILDER_DEBUG_LEVEL == 2 else "",
                         )
                 if len(self.input_params) > 0:
-                    for iparam in sorted(self.input_params):
+                    for iparam in sorted(ip.name for ip in self.input_params):
                         msg += f"  *Parameter {iparam}\n"
             msg += f"ComputationalGraph {target_node.name} outputs {comment}:\n"
             if not self.output_fields and not self.output_params:
@@ -743,7 +778,7 @@ class GraphBuilder:
                             f": {oreqs}" if GRAPH_BUILDER_DEBUG_LEVEL == 2 else "",
                         )
                 if len(self.output_params) > 0:
-                    for oparam in sorted(self.output_params):
+                    for oparam in sorted(op.name for op in self.output_params):
                         msg += f"  *Parameter {oparam}\n"
 
             msg += "\n"
@@ -914,43 +949,18 @@ class GraphBuilder:
             available_elems.update(self.output_params)
 
             # Find redistribute candidates
-            mgs = "  >[IT] Current task {} parameters and fields : {}"
-            gprint(
-                mgs.format(
-                    "can communicate",
-                    ", ".join(
-                        {
-                            _ if not hasattr(_, "name") else _.name
-                            for _ in available_elems.keys()
-                        }
-                        - self._intertasks_exchanged
-                    ),
-                )
-            )
-            gprint(
-                mgs.format(
-                    "needs",
-                    ", ".join(
-                        {
-                            _ if not hasattr(_, "name") else _.name
-                            for _ in needed_elems.keys()
-                        }
-                        - self._intertasks_exchanged
-                    ),
-                )
-            )
-
-            for _k in available_elems:
-                if _k in needed_elems.keys():
-                    needed_elems.pop(_k)
             for it_redistribute_kwargs in __find_elements_to_redistribute(
                 available_elems, needed_elems
             ):
                 if it_redistribute_kwargs:
-                    node = RedistributeInter(**it_redistribute_kwargs)
+                    if "variable" in it_redistribute_kwargs.keys():
+                        node = RedistributeInter(**it_redistribute_kwargs)
+                    else:
+                        node = RedistributeInterParam(**it_redistribute_kwargs)
                     node_id = len(target_node.nodes)
                     target_node.push_nodes(node)
-                    node.initialize()
+                    if isinstance(node, RedistributeInter):
+                        node.initialize()
                     gprint(
                         " >Handling node {}: {} {}".format(
                             node_id, node.name, node.__class__
@@ -1050,6 +1060,15 @@ class GraphBuilder:
             node_operators = node.operators()
             node_ops.extend(node_operators)
             node_vertices += [None] * len(node_operators)
+        elif isinstance(node, RedistributeInterParam):
+            node_ops.extend(
+                [
+                    node,
+                ]
+            )
+            node_vertices += [
+                None,
+            ]
         elif node.mpi_params is None or node.mpi_params.on_task:
             if isinstance(node, Problem):
                 node._build_graph(
diff --git a/hysop/core/mpi/redistribute.py b/hysop/core/mpi/redistribute.py
index 38e15aea314ad9b9afc2b19f1ffbf52d4efe3ea3..6086c557199ba001909bf348f5f3316c425e18f3 100644
--- a/hysop/core/mpi/redistribute.py
+++ b/hysop/core/mpi/redistribute.py
@@ -35,6 +35,7 @@ redistribute deployment.
 """
 
 from hashlib import sha1
+import numpy as np
 from hysop.constants import Backend, DirectionLabels, MemoryOrdering
 from hysop.tools.htypes import check_instance, to_set, first_not_None
 from hysop.tools.decorators import debug
@@ -45,8 +46,10 @@ from hysop.topology.topology_descriptor import TopologyDescriptor
 from hysop.core.mpi.topo_tools import TopoTools
 from hysop.core.mpi.bridge import Bridge, BridgeOverlap, BridgeInter
 from hysop.operator.base.redistribute_operator import RedistributeOperatorBase
+from hysop.core.graph.computational_operator import ComputationalGraphOperator
 from hysop.core.graph.graph import op_apply
-from hysop import MPI
+from hysop import MPI, MPIParams
+from hysop.parameters.scalar_parameter import ScalarParameter, TensorParameter
 
 DEBUG_REDISTRIBUTE = 0
 
@@ -798,3 +801,127 @@ class RedistributeInter(RedistributeOperatorBase):
                         tkind=Backend.OPENCL,
                     )
             self.dFout.exchange_ghosts()
+
+
+class RedistributeInterParam(ComputationalGraphOperator):
+    """parameter transfer between two operators/topologies.
+    Source and target must:
+     *be MPIParams defined on different communicators
+    """
+
+    @classmethod
+    def supports_mpi(cls):
+        return True
+
+    def __new__(
+        cls, parameter, source_topo, target_topo, other_task_id, domain, **kwds
+    ):
+        return super().__new__(cls, **kwds)
+
+    def __init__(
+        self, parameter, source_topo, target_topo, other_task_id, domain, **kwds
+    ):
+        """
+        Communicate parameters across tasks.
+
+        Parameters
+        ----------
+        parameter: tuple of ScalarParameter or TensorParameter
+            parameters to communicate
+        source_topo: MPIParams
+        target_topo: MPIParams
+        """
+        check_instance(parameter, tuple, values=(ScalarParameter, TensorParameter))
+        check_instance(source_topo, MPIParams, allow_none=True)
+        check_instance(target_topo, MPIParams, allow_none=True)
+        input_fields, output_fields = {}, {}
+        input_params, output_params = {}, {}
+        assert not (source_topo is None and target_topo is None)
+        if not source_topo is None and source_topo.on_task:
+            input_params = {p: source_topo for p in parameter}
+        if not target_topo is None and target_topo.on_task:
+            output_params = {p: target_topo for p in parameter}
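+        # Ranks on the source task declare the parameters as inputs, ranks on the target task as outputs (both if tasks overlap)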
+        super().__init__(
+            mpi_params=first_not_None(source_topo, target_topo),
+            input_params=input_params,
+            output_params=output_params,
+            input_fields=input_fields,
+            output_fields=output_fields,
+            **kwds,
+        )
+        self.initialized = True
+        self.domain = domain
+        self.source_task = other_task_id if source_topo is None else source_topo.task_id
+        self.target_task = other_task_id if target_topo is None else target_topo.task_id
+        self.task_is_source = domain.is_on_task(self.source_task)
+        self.task_is_target = domain.is_on_task(self.target_task)
+        if self.task_is_source:
+            assert source_topo.on_task
+        if self.task_is_target:
+            assert target_topo.on_task
+        self.inter_comm = domain.task_intercomm(
+            self.target_task if self.task_is_source else self.source_task
+        )
+        if self.inter_comm.is_inter:
+            # Disjoint tasks with real inter-communicator
+            self._the_apply = self._apply_intercomm
+        elif self.inter_comm.is_intra:
+            # Overlapping tasks using an intra-communicator from the union of the tasks' procs
+            self._the_apply = self._apply_intracomm
+
+        self._all_params_by_type = {}
+        for p in sorted(self.parameters, key=lambda _: _.name):
+            if not p.dtype in self._all_params_by_type:
+                self._all_params_by_type[p.dtype] = []
+            self._all_params_by_type[p.dtype].append(p)
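+        # Pre-allocate one flat buffer per dtype so all parameters of a dtype travel in a single broadcast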
+        self._send_temp_by_type = {
+            t: np.zeros((len(self._all_params_by_type[t]),), dtype=t)
+            for t in self._all_params_by_type.keys()
+        }
+        self._recv_temp_by_type = {
+            t: np.zeros((len(self._all_params_by_type[t]),), dtype=t)
+            for t in self._all_params_by_type.keys()
+        }
+
+    @op_apply
+    def apply(self, **kwds):
+        self._the_apply(**kwds)
+
+    def _apply_intercomm(self, **kwds):
+        """Disjoint tasks so inter-comm bcast is needed."""
+        for t in self._all_params_by_type.keys():
+            if self.task_is_source:
+                self._send_temp_by_type[t][...] = [
+                    p() for p in self._all_params_by_type[t]
+                ]
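+                # On an inter-communicator, the sending group passes MPI.ROOT on its root rank and MPI.PROC_NULL elsewhere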
+                self.inter_comm.bcast(
+                    self._send_temp_by_type[t],
+                    root=MPI.ROOT if self.domain.task_rank() == 0 else MPI.PROC_NULL,
+                )
+            if self.task_is_target:
+                self._recv_temp_by_type[t] = self.inter_comm.bcast(
+                    self._send_temp_by_type[t], root=0
+                )
+                for p, v in zip(
+                    self._all_params_by_type[t], self._recv_temp_by_type[t]
+                ):
+                    p.value = v
+
+    def _apply_intracomm(self, **kwds):
+        """Communicator is an intra-communicator defined as tasks' comm union.
+        Single broadcast is enough.
+        """
+        for t in self._all_params_by_type.keys():
+            if self.task_is_source and self.domain.task_rank() == 0:
+                self._send_temp_by_type[t][...] = [
+                    p() for p in self._all_params_by_type[t]
+                ]
+            self._recv_temp_by_type[t] = self.inter_comm.bcast(
+                self._send_temp_by_type[t],
+                self.domain.task_root_in_parent(self.source_task),
+            )
+            if self.task_is_target:
+                for p, v in zip(
+                    self._all_params_by_type[t], self._recv_temp_by_type[t]
+                ):
+                    p.value = v
diff --git a/hysop/core/tests/test_checkpoint.sh b/hysop/core/tests/test_checkpoint.sh
index ffa9194c5d3c03c7a2e9f8c59ef72c041f369e96..d2ab000b183336c974a92871548104d00d0f6607 100755
--- a/hysop/core/tests/test_checkpoint.sh
+++ b/hysop/core/tests/test_checkpoint.sh
@@ -24,7 +24,7 @@ MPIRUN_EXECUTABLE=${MPIRUN_EXECUTABLE:-mpirun}
 MPIRUN_ARGS='-np'
 MPIRUN_FAIL_EARLY="-mca orte_abort_on_non_zero_status 1"
 if [ "${MPIRUN_EXECUTABLE}" = "srun" ]; then MPIRUN_ARGS='-n'; fi
-if [ "${MPIRUN_EXECUTABLE}" = "mpirun" ]; then MPIRUN_ARGS='--oversubscribe '${MPIRUN_ARGS}; fi
+if [[ ${MPIRUN_EXECUTABLE} == *"mpirun"* ]]; then MPIRUN_ARGS='--oversubscribe '${MPIRUN_ARGS}; fi
 MPIRUN_ARGS="${MPIRUN_FAIL_EARLY} ${MPIRUN_ARGS}"
 
 SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
diff --git a/hysop/core/tests/test_tasks.sh b/hysop/core/tests/test_tasks.sh
index fcae4e9ab18781ff179a7482cea0b57b1074d9a6..2efc8b7308d517855738e7813fb428565cd06e7a 100755
--- a/hysop/core/tests/test_tasks.sh
+++ b/hysop/core/tests/test_tasks.sh
@@ -23,7 +23,7 @@ PYTHON_EXECUTABLE=${PYTHON_EXECUTABLE:-python3}
 MPIRUN_EXECUTABLE=${MPIRUN_EXECUTABLE:-mpirun}
 MPIRUN_TASKS_OPTION='-np'
 if [ "${MPIRUN_EXECUTABLE}" = "srun" ]; then MPIRUN_TASKS_OPTION='-n'; fi
-if [ "${MPIRUN_EXECUTABLE}" = "mpirun" ]; then MPIRUN_TASKS_OPTION='--oversubscribe '${MPIRUN_TASKS_OPTION}; fi
+if [[ ${MPIRUN_EXECUTABLE} == *"mpirun"* ]]; then MPIRUN_TASKS_OPTION='--oversubscribe '${MPIRUN_TASKS_OPTION}; fi
 MPIRUN_FAIL_EARLY="-mca orte_abort_on_non_zero_status 1"
 MPIRUN_ARGS="${MPIRUN_FAIL_EARLY} ${MPIRUN_TASKS_OPTION} 4"
 
diff --git a/hysop/operator/adapt_timestep.py b/hysop/operator/adapt_timestep.py
index 45e07aaddeb59eba96f5a12de5c91cf3d95296b0..b78d24eb5f61881bf02840e1b887392b8bc6e615 100644
--- a/hysop/operator/adapt_timestep.py
+++ b/hysop/operator/adapt_timestep.py
@@ -32,6 +32,7 @@ from hysop.core.graph.computational_operator import ComputationalGraphOperator
 from hysop.core.graph.graph import op_apply
 from hysop.fields.continuous_field import Field
 from hysop.parameters import ScalarParameter, TensorParameter
+from hysop.parameters.parameter import Parameter
 from hysop.core.mpi import MPI
 from hysop.backend.host.host_operator import HostOperatorBase
 
@@ -71,10 +72,10 @@ class TimestepCriteria(HostOperatorBase, metaclass=ABCMeta):
         ----------
         parameter: ScalarParameter
             Timestep parameter that will be updated.
-        input_params: dict
-            Input parameters used to compute criteria.
-        output_params: dict
+        input_params: set
             Input parameters used to compute criteria.
+        output_params: set
+            Output parameters used to compute criteria.
         min_dt : float, optional
             Minimum value allowed for time step, defaults to 0.
         max_dt : float, optional
@@ -87,13 +88,13 @@ class TimestepCriteria(HostOperatorBase, metaclass=ABCMeta):
             Base class arguments.
         """
         check_instance(parameter, ScalarParameter)
-        check_instance(input_params, dict, keys=str)
-        check_instance(output_params, dict, keys=str)
+        check_instance(input_params, set, values=Parameter)
+        check_instance(output_params, set, values=Parameter)
         assert (min_dt is None) or (min_dt > 0.0)
         assert (max_dt is None) or (max_dt > 0.0)
         assert (min_dt is None) or (max_dt is None) or (max_dt >= min_dt)
         assert (dt_coeff is None) or (dt_coeff > 0.0)
-        assert parameter.name in output_params
+        assert parameter in output_params
 
         super().__init__(input_params=input_params, output_params=output_params, **kwds)
 
@@ -306,7 +307,7 @@ class CflTimestepCriteria(TimestepCriteria):
             check_instance(Fmin, TensorParameter)
             check_instance(Fmax, TensorParameter)
             assert Fmin.shape == Fmax.shape
-            input_params = {Fmin.name: Fmin, Fmax.name: Fmax}
+            input_params = {Fmin, Fmax}
             dtype = Fmin.dtype
             shape = Fmin.shape
             size = Fmin.size
@@ -315,7 +316,7 @@ class CflTimestepCriteria(TimestepCriteria):
             msg = "Cannot specify (Fmin,Fmax) and Finf at the same time."
             assert Fmin is None, msg
             assert Fmax is None, msg
-            input_params = {Finf.name: Finf}
+            input_params = {Finf}
             dtype = Finf.dtype
             shape = Finf.shape
             size = Finf.size
@@ -330,7 +331,7 @@ class CflTimestepCriteria(TimestepCriteria):
             name=name,
             pretty_name=pretty_name,
             input_params=input_params,
-            output_params={parameter.name: parameter},
+            output_params={parameter},
             parameter=parameter,
             **kwds,
         )
@@ -476,13 +477,13 @@ class AdvectionTimestepCriteria(TimestepCriteria):
         check_instance(gradFinf, TensorParameter, allow_none=True)
         check_instance(parameter, ScalarParameter)
         check_instance(criteria, AdvectionCriteria)
-        input_params = {}
+        input_params = set()
         if Finf is not None:
             assert Finf().ndim == 1, "Finf should be a 1D tensor parameter."
-            input_params[Finf.name] = Finf
+            input_params.add(Finf)
         if gradFinf is not None:
             assert gradFinf().ndim == 2, "gradFinf should be a 2D tensor parameter."
-            input_params[gradFinf.name] = gradFinf
+            input_params.add(gradFinf)
 
         name = first_not_None(name, "LCFL")
         pretty_name = first_not_None(pretty_name, name)
@@ -490,7 +491,7 @@ class AdvectionTimestepCriteria(TimestepCriteria):
             name=name,
             pretty_name=pretty_name,
             input_params=input_params,
-            output_params={parameter.name: parameter},
+            output_params={parameter},
             parameter=parameter,
             **kwds,
         )
@@ -591,8 +592,8 @@ class StretchingTimestepCriteria(TimestepCriteria):
         super().__init__(
             name=name,
             pretty_name=pretty_name,
-            input_params={gradFinf.name: gradFinf},
-            output_params={parameter.name: parameter},
+            input_params={gradFinf},
+            output_params={parameter},
             parameter=parameter,
             **kwds,
         )
@@ -639,8 +640,8 @@ class MergeTimeStepCriterias(TimestepCriteria):
         check_instance(parameter, ScalarParameter)
         check_instance(criterias, dict, keys=str, values=TimestepCriteria)
         check_instance(equivalent_CFL, ScalarParameter, allow_none=True)
-        output_params = {parameter.name: parameter}
-        input_params = {}
+        output_params = {parameter}
+        input_params = set()
         for criteria in criterias.values():
             input_params.update(criteria.output_params)
 
@@ -657,7 +658,7 @@ class MergeTimeStepCriterias(TimestepCriteria):
         )
 
     def compute_criteria(self, **kwds):
-        dt = min(p.value for p in self.input_params.values())
+        dt = min(p.value for p in self.input_params)
         if self.equivalent_CFL is not None:
             cfl = self.cfl_criteria.compute_cfl(dt)
             self.equivalent_CFL.set_value(cfl)
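The adapt_timestep changes above set the pattern repeated throughout this patch: `input_params`/`output_params` move from name-keyed dicts (`{p.name: p}`) to plain sets of `Parameter` objects, so membership is checked on the objects themselves (`parameter in output_params`) and merging criteria becomes a set union. A minimal sketch of the new convention from an operator author's point of view (the parameter names and values here are illustrative only):

```python
# Sketch of the old vs new parameter-container convention (illustrative).
import numpy as np
from hysop.parameters.scalar_parameter import ScalarParameter

dt = ScalarParameter("dt", dtype=np.float64, initial_value=1e-3)
nu = ScalarParameter("nu", dtype=np.float64, initial_value=1e-5)

# Old convention: a name-keyed dict, redundant since every Parameter
# already carries its own name.
old_input_params = {dt.name: dt, nu.name: nu}

# New convention: a set of Parameter objects.
new_input_params = {dt, nu}
assert dt in new_input_params

# Aggregation (as in MergeTimeStepCriterias) becomes a set union, and
# reductions iterate over the set directly:
merged = new_input_params | {nu}
smallest = min(p.value for p in merged)
```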
diff --git a/hysop/operator/base/advection_dir.py b/hysop/operator/base/advection_dir.py
index 460dbdb7bf9583ba327b9951c056230d60c19b34..88510413e0175e3edb15a14dac919653d1c05c33 100644
--- a/hysop/operator/base/advection_dir.py
+++ b/hysop/operator/base/advection_dir.py
@@ -218,8 +218,8 @@ class DirectionalAdvectionBase:
 
         input_fields = {Vd: variables[velocity]}
         output_fields = {}
-        input_params = {dt.name: dt}
-        output_params = {}
+        input_params = {dt}
+        output_params = set()
 
         for ifield, ofield in zip(advected_fields_in, advected_fields_out):
             input_fields[ifield] = variables[ifield]
diff --git a/hysop/operator/base/convergence.py b/hysop/operator/base/convergence.py
index fc034f0a958b30bb428d1d03244a6cb7b4a5319d..1ad36456dc9dd78748cd9fad87683b3d99431734 100644
--- a/hysop/operator/base/convergence.py
+++ b/hysop/operator/base/convergence.py
@@ -93,7 +93,7 @@ class ConvergenceBase:
             )
 
         input_fields = {field: variables[field]}
-        output_params = {convergence.name: convergence}
+        output_params = {convergence}
 
         self.field = field
         self.convergence = convergence
diff --git a/hysop/operator/base/custom.py b/hysop/operator/base/custom.py
index 7634add66257d10f9b925b2757ed6c6863d57745..ef3f3073ea2cade8eb1ec9099612d0453e9aa7f0 100644
--- a/hysop/operator/base/custom.py
+++ b/hysop/operator/base/custom.py
@@ -28,21 +28,44 @@ from hysop.core.graph.graph import op_apply
 class CustomOperatorBase:
 
     @debug
-    def __new__(cls, func, invars=None, outvars=None,
-                extra_args=None, variables=None, ghosts=None,
-                do_update_ghosts=True, **kwds):
-        return super().__new__(cls,
-                               input_fields=None, output_fields=None,
-                               input_params=None, output_params=None,
-                               **kwds)
+    def __new__(
+        cls,
+        func,
+        invars=None,
+        outvars=None,
+        extra_args=None,
+        variables=None,
+        ghosts=None,
+        do_update_ghosts=True,
+        **kwds,
+    ):
+        return super().__new__(
+            cls,
+            input_fields=None,
+            output_fields=None,
+            input_params=None,
+            output_params=None,
+            **kwds,
+        )
 
     @debug
-    def __init__(self, func, invars=None, outvars=None,
-                 extra_args=None, variables=None, ghosts=None, do_update_ghosts=True, **kwds):
-        check_instance(invars, (tuple, list), values=(Field, Parameter),
-                       allow_none=True)
-        check_instance(outvars, (tuple, list), values=(Field, Parameter),
-                       allow_none=True)
+    def __init__(
+        self,
+        func,
+        invars=None,
+        outvars=None,
+        extra_args=None,
+        variables=None,
+        ghosts=None,
+        do_update_ghosts=True,
+        **kwds,
+    ):
+        check_instance(
+            invars, (tuple, list), values=(Field, Parameter), allow_none=True
+        )
+        check_instance(
+            outvars, (tuple, list), values=(Field, Parameter), allow_none=True
+        )
         check_instance(extra_args, tuple, allow_none=True)
         check_instance(
             variables,
@@ -52,21 +75,21 @@ class CustomOperatorBase:
             allow_none=True,
         )
         check_instance(ghosts, int, allow_none=True)
-        check_instance(do_update_ghosts, bool,)
+        check_instance(do_update_ghosts, bool)
         input_fields, output_fields = {}, {}
-        input_params, output_params = {}, {}
+        input_params, output_params = set(), set()
         if invars is not None:
             for v in invars:
                 if isinstance(v, Field):
                     input_fields[v] = variables[v]
                 elif isinstance(v, Parameter):
-                    input_params[v.name] = v
+                    input_params.add(v)
         if outvars is not None:
             for v in outvars:
                 if isinstance(v, Field):
                     output_fields[v] = variables[v]
                 elif isinstance(v, Parameter):
-                    output_params[v.name] = v
+                    output_params.add(v)
         self.invars, self.outvars = invars, outvars
         self.func = func
         self.extra_args = tuple()
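Besides the black-style expansion, `CustomOperatorBase.__init__` keeps dispatching each variable by type: `Field`s keep a `{field: topology_descriptor}` entry while `Parameter`s now go into the new sets. Condensed into a free function, the routing amounts to the following sketch (not an actual HySoP helper):

```python
# Sketch of the invars/outvars dispatch in CustomOperatorBase.__init__:
# Fields carry topology descriptors, Parameters are simply collected.
from hysop.fields.continuous_field import Field
from hysop.parameters.parameter import Parameter

def split_variables(invars, outvars, variables):
    input_fields, output_fields = {}, {}
    input_params, output_params = set(), set()
    for v in invars or ():
        if isinstance(v, Field):
            input_fields[v] = variables[v]
        elif isinstance(v, Parameter):
            input_params.add(v)
    for v in outvars or ():
        if isinstance(v, Field):
            output_fields[v] = variables[v]
        elif isinstance(v, Parameter):
            output_params.add(v)
    return input_fields, output_fields, input_params, output_params
```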
diff --git a/hysop/operator/base/custom_symbolic_operator.py b/hysop/operator/base/custom_symbolic_operator.py
index 617c8e19ea3da925a30db9e4e4e4e9bef24da538..f0c3bb724ef2e23532b83ba898bf5ca36c4c6621 100644
--- a/hysop/operator/base/custom_symbolic_operator.py
+++ b/hysop/operator/base/custom_symbolic_operator.py
@@ -1630,8 +1630,8 @@ class CustomSymbolicOperatorBase(DirectionalOperatorBase, metaclass=ABCMeta):
 
         input_fields = expr_info.input_fields
         output_fields = expr_info.output_fields
-        input_params = expr_info.input_params
-        output_params = expr_info.output_params
+        input_params = set(expr_info.input_params.values())
+        output_params = set(expr_info.output_params.values())
 
         input_tensor_fields = ()
         output_tensor_fields = ()
diff --git a/hysop/operator/base/derivative.py b/hysop/operator/base/derivative.py
index 432227bb253dc1e23150da93a95be2199ff7ade1..0cba46286b222fbcce0c718666e545d77652cf77 100644
--- a/hysop/operator/base/derivative.py
+++ b/hysop/operator/base/derivative.py
@@ -180,7 +180,7 @@ class SpaceDerivativeBase(metaclass=ABCMeta):
 
         input_fields = {F: variables.get(F, None)}
         output_fields = {dF: variables.get(dF, input_fields[F])}
-        input_params = {}
+        input_params = set()
 
         is_inplace = dF is F
         require_tmp = first_not_None(require_tmp, is_inplace)
@@ -190,7 +190,7 @@ class SpaceDerivativeBase(metaclass=ABCMeta):
             input_fields[A] = variables.get(A, input_fields[F])
             scale_by_field = True
         elif isinstance(A, TensorParameter):
-            input_params[A.name] = A
+            input_params.add(A)
             scale_by_parameter = True
         elif isinstance(A, (float, int, npw.number, sm.Basic)):
             scale_by_value = (A != 1) and (A != 1.0)
diff --git a/hysop/operator/base/diffusion.py b/hysop/operator/base/diffusion.py
index 7a11ba7b06a5b93efb7b7d25767c9fe59ee8d859..e246347e06227bcf604d545a5dcdd7196a269cf9 100644
--- a/hysop/operator/base/diffusion.py
+++ b/hysop/operator/base/diffusion.py
@@ -79,7 +79,7 @@ class DiffusionOperatorBase(PoissonOperatorBase):
         check_instance(nu, ScalarParameter)
         check_instance(dt, ScalarParameter)
 
-        input_params = {dt.name: dt, nu.name: nu}
+        input_params = {dt, nu}
 
         default_name = f"Diffusion_{Fin.name}_{Fout.name}"
         default_pretty_name = f"Diffusion_{Fin.pretty_name}_{Fout.pretty_name}"
diff --git a/hysop/operator/base/enstrophy.py b/hysop/operator/base/enstrophy.py
index 7dbe6ec28e393ea2f255b7f9737e7eb2f71399d2..7611d6d62fcd32d3dd4c8fa8b8284e85cc093161 100644
--- a/hysop/operator/base/enstrophy.py
+++ b/hysop/operator/base/enstrophy.py
@@ -112,7 +112,7 @@ class EnstrophyBase(metaclass=ABCMeta):
 
         input_fields = {vorticity: variables[vorticity]}
         output_fields = {WdotW: variables[WdotW]}
-        output_params = {enstrophy.name: enstrophy}
+        output_params = {enstrophy}
 
         if rho is not None:
             input_fields[rho] = variables[rho]
diff --git a/hysop/operator/base/external_force.py b/hysop/operator/base/external_force.py
index a92f45832bab6bf0838a512834a3794bf2056b2c..100fa540a7ed0e874ce0926d2341524793edf649 100644
--- a/hysop/operator/base/external_force.py
+++ b/hysop/operator/base/external_force.py
@@ -263,8 +263,6 @@ class SpectralExternalForceOperatorBase(SpectralOperatorBase):
         output_fields = {
             f: self.get_topo_descriptor(variables, f) for f in output_fields
         }
-        input_params = {p.name: p for p in input_params}
-        output_params = {p.name: p for p in output_params}
 
         # TODO share tmp buffers for the whole tensor
         force = vorticity.tmp_like(name="Fext", ghosts=0, mem_tag="tmp_fext")
diff --git a/hysop/operator/base/integrate.py b/hysop/operator/base/integrate.py
index 220235fe617b81d4b6502a40fd67c6a631ff284c..3cfb8f298dd121de92488e9884a55c8f8b74b72b 100644
--- a/hysop/operator/base/integrate.py
+++ b/hysop/operator/base/integrate.py
@@ -126,7 +126,7 @@ class IntegrateBase(metaclass=ABCMeta):
             check_instance(scaling, tuple, values=float, size=field.nb_components)
 
         input_fields = {field: variables[field]}
-        output_params = {parameter.name: parameter}
+        output_params = {parameter}
 
         default_name = f"integrate_{field.name}"
         default_pname = f"∫{field.pretty_name}"
diff --git a/hysop/operator/base/min_max.py b/hysop/operator/base/min_max.py
index 00938cb9a6d89f49cdfe37d9b903df1836dcc416..ab5998b54065b6b46834b4700a72da0d97b2ddc9 100644
--- a/hysop/operator/base/min_max.py
+++ b/hysop/operator/base/min_max.py
@@ -280,7 +280,7 @@ class MinMaxFieldStatisticsBase:
             ppbasename=ppbasename,
         )
 
-        output_params = {p.name: p for p in parameters.values() if (p is not None)}
+        output_params = {p for p in parameters.values() if p is not None}
 
         if MinMaxDerivativeStatisticsBase in self.__class__.__mro__:
             super().__init__(
diff --git a/hysop/operator/base/poisson_curl.py b/hysop/operator/base/poisson_curl.py
index d6efbe82be81edab8ebdfede97d5f893ccc17bdd..a112bf670cfaecf6af598e8685c486766b354ee3 100644
--- a/hysop/operator/base/poisson_curl.py
+++ b/hysop/operator/base/poisson_curl.py
@@ -225,13 +225,12 @@ class PoissonCurlOperatorBase:
         # input and output fields
         vtopology = variables[velocity]
         wtopology = variables[vorticity]
-        input_params = {}
+        input_params = set()
         input_fields = {vorticity: wtopology}
         output_fields = {velocity: vtopology}
         if should_diffuse:
             assert dt is not None, "Diffusion timestep has not been given."
-            input_params[diffusion.name] = diffusion
-            input_params[dt.name] = dt
+            input_params.update({diffusion, dt})
         if should_diffuse or should_project:
             output_fields[vorticity] = wtopology
 
@@ -277,7 +276,7 @@ class PoissonCurlOperatorBase:
             msg = "Cannot compute output vorticity energy because there is no output vorticity !"
             assert should_diffuse or should_project, msg
 
-        output_params = {}
+        output_params = set()
         compute_Win_E_param = EnergyDumper.build_energy_parameter(
             do_compute=do_compute_Win_E,
             field=vorticity,
diff --git a/hysop/operator/base/spectral_operator.py b/hysop/operator/base/spectral_operator.py
index 9a2a72097cebdeddb9d8354ff872d636aa234d63..ac0431596c34c7c5229ec77b4e0dda28fcc4effd 100644
--- a/hysop/operator/base/spectral_operator.py
+++ b/hysop/operator/base/spectral_operator.py
@@ -226,7 +226,7 @@ class SpectralOperatorBase:
         for tg in self.transform_groups.values():
             output_parameters.update(tg.output_parameters)
         for p in output_parameters:
-            self.output_params[p.name] = p
+            self.output_params.add(p)
 
     def initialize(self, **kwds):
         super().initialize(**kwds)
diff --git a/hysop/operator/base/stretching_dir.py b/hysop/operator/base/stretching_dir.py
index 1d6c518640f49c5cf44a8029efc915f633091991..63011b17313c8511b00d592db24057d19c967a7f 100644
--- a/hysop/operator/base/stretching_dir.py
+++ b/hysop/operator/base/stretching_dir.py
@@ -201,8 +201,8 @@ class DirectionalStretchingBase:
 
         input_fields = {velocity: variables[velocity], vorticity: variables[vorticity]}
         output_fields = {vorticity: variables[vorticity]}
-        input_params = {dt.name: dt}
-        output_params = {}
+        input_params = {dt}
+        output_params = set()
 
         super().__init__(
             input_fields=input_fields,
diff --git a/hysop/operator/gradient.py b/hysop/operator/gradient.py
index 7a004a8807c888a311020578f181c9d151eac99e..b076a8ab763c41b3bbb4cb40ff51ed428598ae3b 100644
--- a/hysop/operator/gradient.py
+++ b/hysop/operator/gradient.py
@@ -533,15 +533,13 @@ class MinMaxGradientStatistics(Gradient):
                         msg = f">Parameter {param.pretty_name} set to:\n{param.value}"
                         vprint(msg)
 
-        _phony_input_params = {}
-        _phony_output_params = {}
+        _phony_input_params = set()
+        _phony_output_params = set()
         for pname in _names.keys():
             if pname in extra_params:
                 param = parameters[pname]
-                _phony_input_params.update(
-                    {p.name: p for p in extra_params[pname].ravel()}
-                )
-                _phony_output_params[param.name] = param
+                _phony_input_params.update(extra_params[pname].ravel())
+                _phony_output_params.add(param)
         op = MergeTensorViewsOperator(
             name=name.format(gradF.name),
             pretty_name=pretty_name.format(gradF.pretty_name),
diff --git a/hysop/operator/inter_task_param_comm.py b/hysop/operator/inter_task_param_comm.py
deleted file mode 100644
index 907c9ac9ca9152a924a121f1382e2a4b0023255e..0000000000000000000000000000000000000000
--- a/hysop/operator/inter_task_param_comm.py
+++ /dev/null
@@ -1,151 +0,0 @@
-# Copyright (c) HySoP 2011-2024
-#
-# This file is part of HySoP software.
-# See "https://particle_methods.gricad-pages.univ-grenoble-alpes.fr/hysop-doc/"
-# for further info.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-"""
-Inter-task parameters exchange.
-"""
-import numpy as np
-from hysop.core.graph.computational_node_frontend import ComputationalGraphNodeFrontend
-from hysop.constants import Implementation, HYSOP_DIM
-from hysop.backend.host.host_operator import HostOperator
-from hysop.tools.htypes import check_instance
-from hysop.parameters.scalar_parameter import ScalarParameter, TensorParameter
-from hysop.tools.decorators import debug
-from hysop.core.graph.graph import op_apply
-from hysop import MPIParams, MPI
-
-
-class PythonInterTaskParamComm(HostOperator):
-    def __new__(cls, parameter, source_task, dest_task, domain, **kwds):
-        return super().__new__(cls, **kwds)
-
-    @debug
-    def __init__(self, parameter, source_task, dest_task, domain, **kwds):
-        """
-        Communicate parameter through tasks
-
-        parameter
-        ----------
-        parameter: tuple of ScalarParameter or TensorParameter
-            parameters to communicate
-        source_task: integer
-            task id to be used as source for communication
-        parent_task: integer
-            task id to be used as context for inter-communication
-
-        """
-        check_instance(parameter, tuple, values=(ScalarParameter, TensorParameter))
-        check_instance(source_task, (int, HYSOP_DIM))
-        check_instance(dest_task, (int, HYSOP_DIM))
-        input_fields, output_fields = {}, {}
-        super().__init__(
-            input_params={_.name: _ for _ in parameter},
-            output_params={_.name: _ for _ in parameter},
-            input_fields=input_fields,
-            output_fields=output_fields,
-            **kwds,
-        )
-        self.domain = domain
-        self.source_task = source_task
-        self.dest_task = dest_task
-        self.task_is_source = domain.is_on_task(source_task)
-        self.task_is_dest = domain.is_on_task(dest_task)
-        self.inter_comm = domain.task_intercomm(
-            dest_task if self.task_is_source else source_task
-        )
-        if self.inter_comm.is_inter:
-            # Disjoint tasks with real inter-communicator
-            self._the_apply = self._apply_intercomm
-        elif self.inter_comm.is_intra:
-            # Overlapping tasks using an intra-communicator fron union of tasks procs
-            self._the_apply = self._apply_intracomm
-
-        self._all_params_by_type = {}
-        for p in sorted(self.parameters, key=lambda _: _.name):
-            if not p.dtype in self._all_params_by_type:
-                self._all_params_by_type[p.dtype] = []
-            self._all_params_by_type[p.dtype].append(p)
-        self._send_temp_by_type = {
-            t: np.zeros((len(self._all_params_by_type[t]),), dtype=t)
-            for t in self._all_params_by_type.keys()
-        }
-        self._recv_temp_by_type = {
-            t: np.zeros((len(self._all_params_by_type[t]),), dtype=t)
-            for t in self._all_params_by_type.keys()
-        }
-
-    @op_apply
-    def apply(self, **kwds):
-        self._the_apply(**kwds)
-
-    def _apply_intercomm(self, **kwds):
-        """Disjoint tasks so inter-comm bcast is needed."""
-        for t in self._all_params_by_type.keys():
-            if self.task_is_source:
-                self._send_temp_by_type[t][...] = [
-                    p() for p in self._all_params_by_type[t]
-                ]
-                self.inter_comm.bcast(
-                    self._send_temp_by_type[t],
-                    root=MPI.ROOT if self.domain.task_rank() == 0 else MPI.PROC_NULL,
-                )
-            if self.task_is_dest:
-                self._recv_temp_by_type[t] = self.inter_comm.bcast(
-                    self._send_temp_by_type[t], root=0
-                )
-                for p, v in zip(
-                    self._all_params_by_type[t], self._recv_temp_by_type[t]
-                ):
-                    p.value = v
-
-    def _apply_intracomm(self, **kwds):
-        """Communicator is an intra-communicator defined as tasks' comm union.
-        Single broadcast is enough.
-        """
-        for t in self._all_params_by_type.keys():
-            if self.task_is_source and self.domain.task_rank() == 0:
-                self._send_temp_by_type[t][...] = [
-                    p() for p in self._all_params_by_type[t]
-                ]
-            self._recv_temp_by_type[t] = self.inter_comm.bcast(
-                self._send_temp_by_type[t],
-                self.domain.task_root_in_parent(self.source_task),
-            )
-            if self.task_is_dest:
-                for p, v in zip(
-                    self._all_params_by_type[t], self._recv_temp_by_type[t]
-                ):
-                    p.value = v
-
-    @classmethod
-    def supports_mpi(cls):
-        return True
-
-
-class InterTaskParamComm(ComputationalGraphNodeFrontend):
-
-    __implementations = {Implementation.PYTHON: PythonInterTaskParamComm}
-
-    @classmethod
-    def implementations(cls):
-        return cls.__implementations
-
-    @classmethod
-    def default_implementation(cls):
-        return Implementation.PYTHON
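The deleted operator's one notable optimization was bucketing parameters by dtype, so each inter-task exchange costs one broadcast per dtype instead of one per parameter; judging from the import added to hysop/operator/redistribute.py below, `RedistributeInterParam` now covers this duty. A standalone mpi4py sketch of the bucketing pattern (parameter names, dtypes, and values made up for illustration):

```python
# Standalone sketch of the dtype-bucketed parameter broadcast used by
# the removed operator; run with e.g. `mpirun -np 2 python sketch.py`.
import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD

# Stand-in parameters: (name, dtype, local value), sorted by name so
# every rank packs them in the same order.
params = sorted(
    [("dt", np.float64, 1e-3), ("it", np.int64, 42)], key=lambda t: t[0]
)

by_dtype = {}
for name, dtype, value in params:
    by_dtype.setdefault(dtype, []).append((name, value))

for dtype, group in by_dtype.items():
    buf = np.array([v for _, v in group], dtype=dtype)
    buf = comm.bcast(buf, root=0)  # one collective per dtype
    for (name, _), v in zip(group, buf):
        print(f"rank {comm.rank}: {name} = {v}")
```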
diff --git a/hysop/operator/meson.build b/hysop/operator/meson.build
index c466dd37a156f81672b3d0aaf87e3c1fb6c86225..725effa38264728a955ae05a2ff8c2cf4e94efb6 100644
--- a/hysop/operator/meson.build
+++ b/hysop/operator/meson.build
@@ -16,7 +16,6 @@ src_python  = [
   'gradient.py',
   'hdf_io.py',
   'integrate.py',
-  'inter_task_param_comm.py',
   'kinetic_energy.py',
   'mean_field.py',
   'memory_reordering.py',
diff --git a/hysop/operator/parameter_plotter.py b/hysop/operator/parameter_plotter.py
index cd9d821024569f279f39ad435136e4b24140b729..e0bad0403e2622f59a5a98712481c6b0257dfe11 100644
--- a/hysop/operator/parameter_plotter.py
+++ b/hysop/operator/parameter_plotter.py
@@ -153,7 +153,7 @@ class ParameterPlotter(PlottingOperator):
         self, name, parameters, alloc_size=128, fig=None, axes=None, shape=None, **kwds
     ):
 
-        input_params = {}
+        input_params = set()
         if (fig is not None) and (axes is not None):
             import matplotlib
 
@@ -162,7 +162,7 @@ class ParameterPlotter(PlottingOperator):
             check_instance(parameters, dict, keys=matplotlib.axes.Axes, values=dict)
             for params in parameters.values():
                 check_instance(params, dict, keys=str, values=ScalarParameter)
-                input_params.update({p.name: p for p in params.values()})
+                input_params.update(params.values())
         else:
             custom_axes = False
             _parameters = {}
diff --git a/hysop/operator/plotters.py b/hysop/operator/plotters.py
index 5ebab8a43dcaa4ff1608b5e4ad238eb260b9a6a5..d592b4c8da97311630d892f9e2a1ddd3176c988a 100644
--- a/hysop/operator/plotters.py
+++ b/hysop/operator/plotters.py
@@ -379,7 +379,7 @@ class ParameterPlotter(PlottingOperator):
         self, name, parameters, alloc_size=128, fig=None, axes=None, shape=None, **kwds
     ):
 
-        input_params = {}
+        input_params = set()
         if (fig is not None) and (axes is not None):
             import matplotlib
 
@@ -388,7 +388,7 @@ class ParameterPlotter(PlottingOperator):
             check_instance(parameters, dict, keys=matplotlib.axes.Axes, values=dict)
             for params in parameters.values():
                 check_instance(params, dict, keys=str, values=ScalarParameter)
-                input_params.update({p.name: p for p in params.values()})
+                input_params.update(params.values())
         else:
             custom_axes = False
             _parameters = {}
diff --git a/hysop/operator/redistribute.py b/hysop/operator/redistribute.py
index fd980ad245d23108296b3660d5ce761899e08470..389013b46b223b33a5f737467e6cd1fefd2c53dc 100644
--- a/hysop/operator/redistribute.py
+++ b/hysop/operator/redistribute.py
@@ -35,6 +35,7 @@ from hysop.topology.cartesian_topology import CartesianTopology
 from hysop.core.mpi.redistribute import (
     RedistributeIntra,
     RedistributeInter,
+    RedistributeInterParam,
     RedistributeOperatorBase,
 )
 from hysop.core.graph.node_generator import ComputationalGraphNodeGenerator
diff --git a/hysop/operator/tests/test_custom_symbolic.py b/hysop/operator/tests/test_custom_symbolic.py
index ac3458c3b326049d44e3dbee502ff388c19f46af..07ce879a79362eec004a743a914bf8e78053277f 100644
--- a/hysop/operator/tests/test_custom_symbolic.py
+++ b/hysop/operator/tests/test_custom_symbolic.py
@@ -422,9 +422,9 @@ class TestCustomSymbolic:
                 refin["f"][field] = ifield.sdata.get().handle.copy()
                 refin["dfields"][field] = ifield
                 in_names.append(field.name)
-            for pname, param in problem.input_params.items():
-                refin["p"][pname] = param.value
-                in_names.append(pname)
+            for param in problem.input_params:
+                refin["p"][param.name] = param.value
+                in_names.append(param.name)
 
             refout = {"f": {}, "p": {}, "dfields": {}}
             out_names = []
@@ -440,11 +440,11 @@ class TestCustomSymbolic:
                         res = res[ofield.compute_slices]
                     refout["f"][field] += (res,)
                     out_names.append(field.name + f"::{i}")
-            for pname, param in problem.output_params.items():
-                refout["p"][pname] = compute_outputs(
+            for param in problem.output_params:
+                refout["p"][param.name] = compute_outputs(
                     refin["f"], refin["p"], refin["dfields"], param, None
                 )
-                out_names.append(pname)
+                out_names.append(param.name)
 
             problem.apply(**apply_kwds)
 
diff --git a/hysop/operators.py b/hysop/operators.py
index 08604a29b03f15b23221066777e6db4143b2506c..95fab15b4bf51a5ee7d15b0d7a35eeff1b0e74bb 100644
--- a/hysop/operators.py
+++ b/hysop/operators.py
@@ -35,7 +35,6 @@ from hysop.operator.transpose import Transpose
 from hysop.operator.misc import Noop, ForceTopologyState
 
 from hysop.operator.redistribute import Redistribute
-from hysop.operator.inter_task_param_comm import InterTaskParamComm
 
 from hysop.operator.analytic import AnalyticField
 from hysop.operator.mean_field import ComputeMeanField
diff --git a/hysop/problem.py b/hysop/problem.py
index b332cdfe8ac1a76a663ee65a015c8cbf01c67c67..c5a33b6941af48a2a9cc270fe78067c5e12a501f 100644
--- a/hysop/problem.py
+++ b/hysop/problem.py
@@ -89,8 +89,11 @@ class Problem(ComputationalGraph):
                 msg = f" Problem {msg} achieved, exiting ! "
                 vprint_banner(msg, at_border=2)
                 sys.exit(0)
-        size = self.mpi_params.size
-        avg_time = self.mpi_params.comm.allreduce(tm.interval) / size
+        comm = self.mpi_params.comm
+        if self.domain.has_tasks:
+            comm = self.domain.parent_comm
+        size = comm.Get_size()
+        avg_time = comm.allreduce(tm.interval) / size
         msg = " Problem building took {} ({}s)"
         if size > 1:
             msg += f", averaged over {size} ranks. "
@@ -263,8 +266,11 @@ class Problem(ComputationalGraph):
                 if report_freq and (simu.current_iteration % report_freq) == 0:
                     self.profiler_report()
 
-        size = self.mpi_params.size
-        avg_time = self.mpi_params.comm.allreduce(tm.interval) / size
+        comm = self.mpi_params.comm
+        if self.domain.has_tasks:
+            comm = self.domain.parent_comm
+        size = comm.Get_size()
+        avg_time = comm.allreduce(tm.interval) / size
         msg = " Simulation took {} ({}s)"
         if size > 1:
             msg += f", averaged over {size} ranks. "
diff --git a/hysop/tools/spectral_utils.py b/hysop/tools/spectral_utils.py
index 978743bb54735c099de2bf78e95aa4c4bae9a7c0..5ddca43d24f27b02480ab843a630e9100f65c966 100644
--- a/hysop/tools/spectral_utils.py
+++ b/hysop/tools/spectral_utils.py
@@ -901,8 +901,8 @@ class EnergyDumper:
                 dtype=None,
                 initial_value=None,
             )
-            assert param.name not in output_params, param.name
-            output_params[param.name] = param
+            assert param not in output_params, param.name
+            output_params.add(param)
         else:
             param = None
         return param
diff --git a/hysop_examples/examples/scalar_advection/turbulent_scalar_advection.py b/hysop_examples/examples/scalar_advection/turbulent_scalar_advection.py
index 7ccb3a96425080eed2dbc90184f94f462622d4d4..d2b428ced1772036a307556cde0904f7f3b4031a 100644
--- a/hysop_examples/examples/scalar_advection/turbulent_scalar_advection.py
+++ b/hysop_examples/examples/scalar_advection/turbulent_scalar_advection.py
@@ -21,6 +21,7 @@
 # Example from JM Etancelin PhD (section 6.3)
 import os
 import numpy as np
+import mpi4py.MPI as MPI
 
 pi = np.pi
 cos = np.cos
@@ -28,6 +29,9 @@ sin = np.sin
 
 TASK_UW = 111
 TASK_SCALAR = 222
+if MPI.COMM_WORLD.Get_size() == 1:
+    TASK_UW = 999
+    TASK_SCALAR = 999
 
 
 def compute(args):
@@ -58,7 +62,6 @@ def compute(args):
         StrangSplitting,
         ParameterPlotter,
         DirectionalAdvection,
-        InterTaskParamComm,
         HDF_Writer,
     )
     from hysop.topology.cartesian_topology import CartesianTopology
@@ -290,9 +293,6 @@ def compute(args):
         criteria=AdvectionCriteria.W_INF,
         mpi_params=mpi_params[TASK_UW],
     )
-    dt_broadcast = InterTaskParamComm(
-        parameter=(dt,), domain=box, source_task=TASK_UW, dest_task=TASK_SCALAR
-    )
 
     # > Outputs
     dumpU = HDF_Writer(
@@ -352,11 +352,12 @@ def compute(args):
     # Create a simulation
     # (do not forget to specify the t and dt parameters here)
     simu = Simulation(
-        start=0.0,
-        end=5.0,
-        max_iter=9999,
+        start=args.tstart,
+        end=args.tend,
+        nb_iter=args.nb_iter,
+        max_iter=args.max_iter,
         dt0=1e-5,
-        times_of_interest=(4.5,),
+        times_of_interest=args.times_of_interest,
         t=t,
         dt=dt,
     )
@@ -374,8 +375,8 @@ def compute(args):
     )
 
     # Initialize fields on tasks where needed
-    problem.initialize_field(velo, formula=init_velocity)
     if box.is_on_task(TASK_UW):
+        problem.initialize_field(velo, formula=init_velocity)
         problem.initialize_field(vorti, formula=init_vorticity)
     if box.is_on_task(TASK_SCALAR):
         problem.initialize_field(scal, formula=init_scal)
@@ -427,6 +428,7 @@ if __name__ == "__main__":
         ndim=3,
         tstart=0.0,
         tend=12.0,
+        times_of_interest=(4.5,),
         npts=(64, 64, 128),
         snpts=(128, 128, 256),
         dump_period=1.0,
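The guard added at the top of turbulent_scalar_advection.py lets the two-task example also run serially: with a single rank, both task ids collapse onto one value so every operator is assigned to the same task. Extracted as a self-contained snippet:

```python
# Collapse the two task ids onto one when no MPI parallelism is
# available, mirroring the guard added to turbulent_scalar_advection.py.
import mpi4py.MPI as MPI

TASK_UW, TASK_SCALAR = 111, 222
if MPI.COMM_WORLD.Get_size() == 1:
    # A single process must carry both tasks.
    TASK_UW = TASK_SCALAR = 999

print(f"task ids: UW={TASK_UW}, SCALAR={TASK_SCALAR}")
```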
diff --git a/hysop_examples/examples/tasks/tasks.py b/hysop_examples/examples/tasks/tasks.py
index c6fa055f398eaf09cbe0790bf8985f8af02398db..2034b7702ee84c52b2d01ddac7f156758add7416 100755
--- a/hysop_examples/examples/tasks/tasks.py
+++ b/hysop_examples/examples/tasks/tasks.py
@@ -57,10 +57,13 @@ def compute(args):
     )
 
     # Define parameters and field (time and analytic field)
-    t = ScalarParameter("t", dtype=args.dtype)
+    t = ScalarParameter("t", dtype=args.dtype, initial_value=0.0)
+    p = ScalarParameter("p", dtype=args.dtype, initial_value=1.0, quiet=False)
+    q = ScalarParameter("q", dtype=args.dtype, initial_value=1.0, quiet=False)
     A = Field(domain=box, name="A", dtype=args.dtype)
     B = Field(domain=box, name="B", dtype=args.dtype)
     C = Field(domain=box, name="C", dtype=args.dtype)
+    D = Field(domain=box, name="D", dtype=args.dtype)
     if has_tasks_overlapping:
         Ap = Field(domain=box, name="Ap", dtype=args.dtype)
     else:
@@ -124,22 +127,27 @@ def compute(args):
     else:
         msg = f"Unknown implementation {impl}."
 
-    def custom_func1(_Ai, _Ao):
-        _Ai.data[0][...] = np.cos(_Ai.data[0])
+    def custom_func1(_Ai, _Ao, _p, _q):
+        _p.value = args.dtype(np.min(_Ai.data[0]))
+        _q.value = args.dtype(np.min(_Ai.data[0]))
+        _Ao.data[0][...] = np.cos(_Ai.data[0])
 
-    def custom_func2(_A, _B):
+    def custom_func2(_A, _B, _C):
         _B.data[0][...] = _A.data[0] ** 2
+        _C.data[0][...] = 1.0
 
-    def custom_func3(_B, _C, _A):
-        _A.data[0][...] = _B.data[0] + _C.data[0] ** 2
+    def custom_func3(_B, _C, _D, _p, _q, _A):
+        _A.data[0][...] = (
+            _B.data[0] + (_C.data[0] + _D.data[0] - 1.0) ** 2 + _p() + _q()
+        )
 
-    # TaskA      |   TaskB
-    # op1 A->A   |
-    # op2 A->B   |
-    #         `--B--,              // Inter-task communication step
-    #            |  op3 B,C->A
-    #         ,--A--'              // Inter-task communication step
-    # endA  A->A |
+    # TaskA          |   TaskB
+    # op1 A->A,p,q   |
+    # op2 A->B,C     |
+    #         `--B,C,p,q--,              // Inter-task communication step
+    #                |  op3 p,q,B,C,D->A
+    #         ,-----A---'               // Inter-task communication step
+    # endA  A->A     |
 
     # Note : without endA operator, the second communication is not
     # automatically inserted because A field is an output for task A
@@ -154,7 +162,7 @@ def compute(args):
         name="custom_op1",
         func=custom_func1,
         invars=(A,),
-        outvars=(A,),
+        outvars=(p, q, A),
         variables={
             A: npts,
         },
@@ -165,17 +173,17 @@ def compute(args):
         name="custom_op2",
         func=custom_func2,
         invars=(A,),
-        outvars=(B,),
-        variables={A: npts, B: npts},
+        outvars=(B, C),
+        variables={A: npts, B: npts, C: npts},
         implementation=impl,
         **_get_op_kwds(TASK_A),
     )
     custom3 = CustomOperator(
         name="custom_op3",
         func=custom_func3,
-        invars=(B, C),
+        invars=(p, q, B, C, D),
         outvars=(Ap,),
-        variables={Ap: npts, B: npts, C: npts},
+        variables={Ap: npts, B: npts, C: npts, D: npts},
         implementation=impl,
         **_get_op_kwds(TASK_B),
     )
@@ -220,14 +228,14 @@ def compute(args):
         t=t,
     )
 
-    def init_C(data, coords, component):
+    def init_D(data, coords, component):
         data[...] = np.sin(sum(x * x for x in coords))
 
     # Finally solve the problem
     if not has_tasks or box.is_on_task(TASK_A):
         problem.initialize_field(A, formula=compute_scalar)
     if not has_tasks or box.is_on_task(TASK_B):
-        problem.initialize_field(C, formula=init_C)
+        problem.initialize_field(D, formula=init_D)
     problem.solve(simu, dry_run=args.dry_run)
 
     for dA in set(A.discrete_fields.values()).union(set(Ap.discrete_fields.values())):
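Tracing the updated op1 → op2 → op3 chain by hand shows what the new `p`, `q`, and `D` exercise: the scalars are produced on TaskA, cross to TaskB together with `B` and `C`, and op3 folds them with the TaskB-local `D`. A numpy-only sanity trace, assuming (as the in-place `invars=(A,)`/`outvars=(p, q, A)` suggests) that op1 reads `A` before overwriting it:

```python
# Hand-trace of custom_func1/2/3 with plain numpy stand-ins.
import numpy as np

dtype = np.float64
A0 = np.random.rand(16).astype(dtype)  # stand-in for the initialized A
D = np.random.rand(16).astype(dtype)   # stand-in for init_D's sin(|x|**2)

p = q = dtype(np.min(A0))              # custom_func1: p, q <- min(A)
A = np.cos(A0)                         # custom_func1: A <- cos(A)
B = A ** 2                             # custom_func2: B <- A**2
C = np.ones_like(A)                    # custom_func2: C <- 1
Ap = B + (C + D - 1.0) ** 2 + p + q    # custom_func3, run on TaskB

# With C == 1 the middle term collapses to D**2:
assert np.allclose(Ap, np.cos(A0) ** 2 + D ** 2 + 2 * p)
```

Note also the argument convention visible in the three callbacks: inputs first, then outputs, with discrete fields before parameters within each group.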
diff --git a/meson.build b/meson.build
index e42782da5a5f9987ebd3cf546df40d3e4ffd3286..491753c8012b3dd7b030c5a70ac224d821c96cde 100644
--- a/meson.build
+++ b/meson.build
@@ -150,12 +150,9 @@ endif
 
 # - Hysop python path is defined using "prefix" value sets during 'meson setup'
 # - or if you submit 'pip3 install .' using a virtual env the path
-# - will be something like: path/venv/lib/python3.xx/site-packages/hysop
+# - will be something like: path/venv/lib/python3.xx/site-packages
 hysop_pythonpath = py.get_install_dir()
 
-hysop_examples_pythonpath = hysop_pythonpath
-hysop_pythonpath = hysop_pythonpath / 'hysop'
-
 message('******************')
 message('Hysop python path:', hysop_pythonpath)
 message('******************')
@@ -179,6 +176,8 @@ subdir('src')
 
 # == Documentation ==
 if with_documentation == 'ON'
+  docs_env         = environment()
+  docs_env.set('PYTHONPATH', py.get_install_dir())
   doxygen = find_program('doxygen', required : true)
   doxygen_dot = find_program('dot', required : true)
   sphinx_api = find_program('sphinx-apidoc', required : true)
diff --git a/src/meson.build b/src/meson.build
index 99487c0c21df4fbc6d2842d934858c3039ff69f9..6266e6c7fb0c011c1b3f586f2640e036cbd8b135 100644
--- a/src/meson.build
+++ b/src/meson.build
@@ -109,7 +109,7 @@ if use_mpi == 'ON' # Have to be ON! OFF is deprecated!
   if use_fortran == 'ON'
     # - Looking for mpi lib for fortran.
     mpi_dep = dependency('mpi', language: 'fortran')
-    
+
     static_library_dep    += [mpi_dep]
 
     if mpi_dep.found()
@@ -155,12 +155,12 @@ if with_fftw == 'ON'
 
   # - Looking for fft3w or fft3wf or fftw3l (/usr/local/lib/pkgconfig)
   fft_dep     = dependency(         'fftw3', required:true)
-   
+
   if not fft_dep.found()
      message('raté')
-  endif 
+  endif
+
 
-  
   if fft_dep.found()
      fft_lib_path = fft_dep.get_variable(default_value: '"libdir" in pkgconfig not available!',
                                          pkgconfig    : 'libdir')
@@ -260,5 +260,5 @@ py.extension_module('f2hysop',
                     include_directories: extension_incdir,
                     dependencies       : py_dep,
                     install            : true,
-                    install_dir        : hysop_pythonpath,
+                    install_dir        : hysop_pythonpath / 'hysop',
                    )