From d38c694b6540300c9412208f2dcc66fd743cf331 Mon Sep 17 00:00:00 2001
From: JM Etancelin <jean-matthieu.etancelin@univ-pau.fr>
Date: Wed, 8 Apr 2020 16:44:53 +0200
Subject: [PATCH] add to custom operator a ghosts parameter if needed

---
 hysop/backend/host/python/operator/custom.py | 15 ++++++++++++++-
 hysop/core/graph/graph.py                    |  4 ++--
 hysop/operator/custom.py                     |  5 +++--
 3 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/hysop/backend/host/python/operator/custom.py b/hysop/backend/host/python/operator/custom.py
index 11fce77c6..74641b19f 100644
--- a/hysop/backend/host/python/operator/custom.py
+++ b/hysop/backend/host/python/operator/custom.py
@@ -9,7 +9,7 @@ from hysop.core.graph.graph import op_apply
 class PythonCustomOperator(HostOperator):
 
     @debug
-    def __init__(self, func, invars=None, outvars=None, variables=None, **kwds):
+    def __init__(self, func, invars=None, outvars=None, variables=None, ghosts=None, **kwds):
         check_instance(invars, (tuple, list), values=(Field, Parameter),
                        allow_none=True)
         check_instance(outvars, (tuple, list), values=(Field, Parameter),
@@ -17,6 +17,7 @@ class PythonCustomOperator(HostOperator):
         check_instance(variables, dict, keys=Field,
                        values=CartesianTopologyDescriptors,
                        allow_none=True)
+        check_instance(ghosts, int, allow_none=True)
         input_fields, output_fields = {}, {}
         input_params, output_params = {}, {}
         if invars is not None:
@@ -33,6 +34,7 @@ class PythonCustomOperator(HostOperator):
                 output_params[v.name] = v
         self.invars, self.outvars = invars, outvars
         self.func = func
+        self._ghosts = ghosts
 
         super(PythonCustomOperator, self).__init__(
             input_fields=input_fields, output_fields=output_fields,
@@ -43,6 +45,17 @@ def supports_mpi(cls):
     def supports_mpi(cls):
         return True
 
+    @debug
+    def get_field_requirements(self):
+        requirements = super(PythonCustomOperator, self).get_field_requirements()
+        if not self._ghosts is None:
+            for it in requirements.iter_requirements():
+                if not it[1] is None:
+                    is_input, (field, td, req) = it
+                    min_ghosts = (max(g, self._ghosts) for g in req.min_ghosts.copy())
+                    req.min_ghosts = min_ghosts
+        return requirements
+
     @debug
     def discretize(self):
         if self.discretized:
diff --git a/hysop/core/graph/graph.py b/hysop/core/graph/graph.py
index 49b5795f5..09e5a7f57 100644
--- a/hysop/core/graph/graph.py
+++ b/hysop/core/graph/graph.py
@@ -3,7 +3,7 @@ import graph_tool as gt
 from graph_tool import Graph, GraphView
 from graph_tool import topology, stats, search
 from hysop.tools.decorators import not_implemented, debug, wraps, profile
-from hysop import vprint
+from hysop import dprint
 
 
 class ComputationalGraphNodeData(object):
@@ -171,7 +171,7 @@ def op_apply(f):
             if not op.to_be_skipped():
                 return f(*args, **kwds)
             else:
-                vprint("Skip {}".format(op.name))
+                dprint("Skip {}".format(op.name))
                 return
         return ret
     return apply
diff --git a/hysop/operator/custom.py b/hysop/operator/custom.py
index 44ba16308..3a00efaa0 100755
--- a/hysop/operator/custom.py
+++ b/hysop/operator/custom.py
@@ -31,11 +31,12 @@ class CustomOperator(ComputationalGraphNodeFrontend):
         return Implementation.PYTHON
 
     @debug
-    def __init__(self, func, invars=None, outvars=None, **kwds):
+    def __init__(self, func, invars=None, outvars=None, ghosts=None, **kwds):
         check_instance(invars, (tuple, list), values=(Field, Parameter),
                        allow_none=True)
         check_instance(outvars, (tuple, list), values=(Field, Parameter),
                        allow_none=True)
+        check_instance(ghosts, int, allow_none=True)
         from inspect import getargspec as signature  # should be inspect.signature in python 3
         nb_args = len(signature(func).args)
         nb_in_f, nb_in_p, nb_out_f, nb_out_p = 0, 0, 0, 0
@@ -57,4 +58,4 @@ class CustomOperator(ComputationalGraphNodeFrontend):
         assert nb_args == nb_in_f + nb_in_p + nb_out_f + nb_out_p, msg
 
         super(CustomOperator, self).__init__(
-            func=func, invars=invars, outvars=outvars, **kwds)
+            func=func, invars=invars, outvars=outvars, ghosts=ghosts, **kwds)
-- 
GitLab