From 66ae8984386321ea0248b1d0591dedc23439c3cb Mon Sep 17 00:00:00 2001
From: Jean-Baptiste Keck <Jean-Baptiste.Keck@imag.fr>
Date: Tue, 20 Nov 2018 11:54:58 +0100
Subject: [PATCH] add multibackend support for hdf_writer

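Factor the creation of topology descriptors out of
ComputationalGraphOperator.get_field_requirements() into a new
create_topology_descriptors() method, and override it in OpenClOperator
and HDF_IO. HDF_IO gains a force_backend keyword (Backend.HOST by
default) so that dumped fields can be requested directly on another
backend; when Backend.OPENCL is forced, the cl_env keyword is forwarded
to the topology descriptors. The particles_above_salt_bc_3d example is
updated to dump its fields from the OpenCL backend.

Usage sketch, mirroring the updated example (velo, npts, C, S, args and
extra_op_kwds are names taken from that example script):

    io_params = IOParams(filename='fields', frequency=args.dump_freq)
    dump_fields = HDF_Writer(name='dump',
                             io_params=io_params,
                             force_backend=Backend.OPENCL,
                             variables={velo[0]: npts, C: npts, S: npts},
                             **extra_op_kwds)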
---
 .../particles_above_salt_bc_3d.py             | 11 ++--
 .../backend/device/opencl/opencl_operator.py  | 39 ++++++++-------
 hysop/core/graph/computational_operator.py    | 50 +++++++++++--------
 hysop/operator/hdf_io.py                      | 38 +++++++++++++-
 4 files changed, 93 insertions(+), 45 deletions(-)

diff --git a/examples/particles_above_salt/particles_above_salt_bc_3d.py b/examples/particles_above_salt/particles_above_salt_bc_3d.py
index e0c655b8f..71a651160 100644
--- a/examples/particles_above_salt/particles_above_salt_bc_3d.py
+++ b/examples/particles_above_salt/particles_above_salt_bc_3d.py
@@ -48,7 +48,8 @@ def compute(args):
                                EnstrophyParameter, TimeParameters, \
                                VolumicIntegrationParameter
     from hysop.constants import Implementation, AdvectionCriteria, \
-                                BoxBoundaryCondition, BoundaryCondition
+                                BoxBoundaryCondition, BoundaryCondition, \
+                                Backend
 
     from hysop.operators import DirectionalAdvection, DirectionalStretching,       \
                                 Diffusion, ComputeMeanField,                       \
@@ -241,9 +242,11 @@ def compute(args):
     io_params = IOParams(filename='fields', frequency=args.dump_freq)
     dump_fields = HDF_Writer(name='dump',
                              io_params=io_params,
+                             force_backend=Backend.OPENCL,
                              variables={velo[0]: npts, 
                                         C: npts, 
-                                        S: npts})
+                                        S: npts},
+                             **extra_op_kwds)
 
     ### Adaptive timestep operator
     adapt_dt = AdaptiveTimeStep(dt, equivalent_CFL=True,
@@ -275,8 +278,8 @@ def compute(args):
         )
 
     problem = Problem(method=method)
-    problem.insert(poisson, diffuse_W, diffuse_S, diffuse_C,
-                   dump_fields,
+    problem.insert(poisson, dump_fields,
+                   diffuse_W, diffuse_S, diffuse_C,
                    splitting, 
                    min_max_U, min_max_W, adapt_dt)
     problem.build()
diff --git a/hysop/backend/device/opencl/opencl_operator.py b/hysop/backend/device/opencl/opencl_operator.py
index 50f604cb0..385be9e0d 100644
--- a/hysop/backend/device/opencl/opencl_operator.py
+++ b/hysop/backend/device/opencl/opencl_operator.py
@@ -162,26 +162,9 @@ class OpenClOperator(ComputationalGraphOperator):
     def check(self):
         super(OpenClOperator, self).check()
         self._check_cl_env()
-
+
     @debug
-    def get_field_requirements(self):
-        """
-        Called just after handle_method(), ie self.method has been set.
-        topology requirements are:
-            1) min and max ghosts for each input and output variables
-            2) allowed splitting directions for cartesian topologies
-            3) required local and global transposition state, if any. 
-            and more
-        they are stored in self.input_field_requirements and
-        self.output_field_requirements.
-
-        keys are continuous fields and values are of type
-        hysop.fields.field_requirement.discretefieldrequirements
-
-        default is backend.opencl, no min or max ghosts and no specific
-        transposition state for each input and output variables.
-        """
-
+    def create_topology_descriptors(self):
         # by default we create OPENCL (gpu) TopologyDescriptors 
         for (field, topo_descriptor) in self.input_fields.iteritems():
             topo_descriptor = TopologyDescriptor.build_descriptor(
@@ -201,6 +184,24 @@ class OpenClOperator(ComputationalGraphOperator):
                     cl_env=self.cl_env)
             self.output_fields[field] = topo_descriptor
 
+    @debug
+    def get_field_requirements(self):
+        """
+        Called just after handle_method(), i.e. once self.method has been set.
+        Topology requirements are:
+            1) min and max ghosts for each input and output variable
+            2) allowed splitting directions for cartesian topologies
+            3) required local and global transposition states, if any,
+            and more.
+        They are stored in self.input_field_requirements and
+        self.output_field_requirements.
+
+        Keys are continuous fields and values are of type
+        hysop.fields.field_requirement.DiscreteFieldRequirements.
+
+        Default is Backend.OPENCL, no min or max ghosts and no specific
+        transposition state for each input and output variable.
+        """
         requirements = super(OpenClOperator, self).get_field_requirements()
 
         for (is_input, reqs) in requirements.iter_requirements():
diff --git a/hysop/core/graph/computational_operator.py b/hysop/core/graph/computational_operator.py
index 9d63b617c..e7c591666 100644
--- a/hysop/core/graph/computational_operator.py
+++ b/hysop/core/graph/computational_operator.py
@@ -158,39 +158,47 @@ class ComputationalGraphOperator(ComputationalGraphNode):
         is initialized.
         """
         pass
+
+    @debug
+    def create_topology_descriptors(self):
+        """
+        Called in get_field_requirements(), just after handle_method().
+        Topology requirements (or descriptors) are:
+            1) min and max ghosts for each input and output variable
+            2) allowed splitting directions for cartesian topologies
+        """
+        # by default we create HOST (cpu) TopologyDescriptors
+        for (field, topo_descriptor) in self.input_fields.iteritems():
+            topo_descriptor = TopologyDescriptor.build_descriptor(
+                    backend=Backend.HOST,
+                    operator=self,
+                    field=field,
+                    handle=topo_descriptor)
+            self.input_fields[field] = topo_descriptor
+
+        for (field, topo_descriptor) in self.output_fields.iteritems():
+            topo_descriptor = TopologyDescriptor.build_descriptor(
+                    backend=Backend.HOST,
+                    operator=self,
+                    field=field,
+                    handle=topo_descriptor)
+            self.output_fields[field] = topo_descriptor
 
     @debug
     def get_field_requirements(self):
          """
          Called just after handle_method(), ie self.method has been set.
-         Topology requirements are:
-             1) min and max ghosts for each input and output variables
-             2) allowed splitting directions for cartesian topologies
          Field requirements are:
              1) required local and global transposition state, if any.
              2) required memory ordering (either C or Fortran)
          Default is Backend.HOST, no min or max ghosts, MemoryOrdering.ANY
          and no specific default transposition state for each input and output variables.
          """
+
+         # Create the topology descriptors
+         self.create_topology_descriptors()
 
-         # by default we create HOST (cpu) TopologyDescriptors
-         for (field, topo_descriptor) in self.input_fields.iteritems():
-             topo_descriptor = TopologyDescriptor.build_descriptor(
-                     backend=Backend.HOST,
-                     operator=self,
-                     field=field,
-                     handle=topo_descriptor)
-             self.input_fields[field] = topo_descriptor
-
-         for (field, topo_descriptor) in self.output_fields.iteritems():
-             topo_descriptor = TopologyDescriptor.build_descriptor(
-                     backend=Backend.HOST,
-                     operator=self,
-                     field=field,
-                     handle=topo_descriptor)
-             self.output_fields[field] = topo_descriptor
-
-         # and we use default DiscreteFieldRequirements (ie. no min ghosts, no max ghosts,
+         # We use default DiscreteFieldRequirements (i.e. no min ghosts, no max ghosts,
          # can_split set to True in all directions, all TranspositionStates
          # and C memory ordering).
          input_field_requirements  = {}
diff --git a/hysop/operator/hdf_io.py b/hysop/operator/hdf_io.py
index 7ec7b59eb..f9dc9d41e 100755
--- a/hysop/operator/hdf_io.py
+++ b/hysop/operator/hdf_io.py
@@ -21,6 +21,7 @@ from hysop.core.graph.computational_graph import ComputationalGraphOperator
 from hysop.fields.continuous_field import Field
 from hysop.topology.cartesian_descriptor import CartesianTopologyDescriptors
 from hysop.core.memory.memory_request import MemoryRequest
+from hysop.topology.topology_descriptor import TopologyDescriptor
 
 class HDF_IO(ComputationalGraphOperator):
     """
@@ -39,7 +40,7 @@ class HDF_IO(ComputationalGraphOperator):
 
     def __init__(self, var_names=None, 
                 name_prefix='', name_postfix='', 
-                **kwds):
+                force_backend=None, **kwds):
         """Read/write some fields data from/into hdf/xmdf files.
         Parallel io.
 
@@ -52,6 +53,8 @@ class HDF_IO(ComputationalGraphOperator):
             Optional name prefix for variables.
         name_postfix: str, optional
             Optional name postfix for variables.
+        force_backend: hysop.constants.Backend
+            Force the backend of the source fields (defaults to Backend.HOST).
         kwds: dict
             Base class arguments.
 
@@ -118,6 +121,39 @@ class HDF_IO(ComputationalGraphOperator):
         self._get_filename = lambda i=None: None
         # File Object that holds hdf file
         self._hdf_file = None
+        # field backend
+        self._force_backend = first_not_None(force_backend, Backend.HOST)
+        td_kwds = {}
+        if (force_backend is Backend.OPENCL):
+            assert 'cl_env' in kwds
+            td_kwds['cl_env'] = kwds.pop('cl_env')
+        self._td_kwds = td_kwds
+
+    @debug
+    def create_topology_descriptors(self):
+        """
+        Called in get_field_requirements(), just after handle_method().
+        Topology requirements (or descriptors) are:
+            1) min and max ghosts for each input and output variable
+            2) allowed splitting directions for cartesian topologies
+        """
+        # create TopologyDescriptors on the forced backend (Backend.HOST by default)
+        td_kwds = self._td_kwds
+        for (field, topo_descriptor) in self.input_fields.iteritems():
+            topo_descriptor = TopologyDescriptor.build_descriptor(
+                    backend=self._force_backend,
+                    operator=self,
+                    field=field,
+                    handle=topo_descriptor, **td_kwds)
+            self.input_fields[field] = topo_descriptor
+
+        for (field, topo_descriptor) in self.output_fields.iteritems():
+            topo_descriptor = TopologyDescriptor.build_descriptor(
+                    backend=self._force_backend,
+                    operator=self,
+                    field=field,
+                    handle=topo_descriptor, **td_kwds)
+            self.output_fields[field] = topo_descriptor
     
     @debug
     def get_field_requirements(self):
-- 
GitLab