From 73c49a6d700791df54b758f4f89fbaf33d74fe03 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Franck=20P=C3=A9rignon?= <franck.perignon@imag.fr>
Date: Wed, 17 Sep 2014 18:58:38 +0200
Subject: [PATCH] Update adaptive time step for intercomm broadcast - Tests ok

---
 HySoP/hysop/mpi/bridge_inter.py                    |  4 +-
 .../operator/tests/test_adaptive_time_step.py      | 56 +++++++++++++++----
 2 files changed, 48 insertions(+), 12 deletions(-)

diff --git a/HySoP/hysop/mpi/bridge_inter.py b/HySoP/hysop/mpi/bridge_inter.py
index ea3e83ac5..ae500e18f 100644
--- a/HySoP/hysop/mpi/bridge_inter.py
+++ b/HySoP/hysop/mpi/bridge_inter.py
@@ -42,10 +42,10 @@ class BridgeInter(object):
         current_task = self._topology.domain.currentTask()
 
         # True if current process is in the 'from' group'
-        task_is_source = current_task() == self.source_id
+        task_is_source = current_task == self.source_id
 
         # True if current process is in the 'to' group
-        task_is_target = current_task() == self.target_id
+        task_is_target = current_task == self.target_id
 
         # Ensure that current process belongs to one and only one task.
         assert task_is_source or task_is_target
diff --git a/HySoP/hysop/operator/tests/test_adaptive_time_step.py b/HySoP/hysop/operator/tests/test_adaptive_time_step.py
index 63c5df60c..6d9b1b9b8 100644
--- a/HySoP/hysop/operator/tests/test_adaptive_time_step.py
+++ b/HySoP/hysop/operator/tests/test_adaptive_time_step.py
@@ -42,13 +42,13 @@ def test_adapt():
     Here we just check if discr/setup/apply process goes well.
     """
     velo, vorti = init()
-    dt = VariableParameter(data=0.0125, name='dt')
-    op = AdaptTimeStep(velo, vorti, dt_adapt=dt,
+    simu = Simulation(nbIter=2)
+    op = AdaptTimeStep(velo, vorti, simulation=simu,
                        discretization=d3d, lcfl=0.125, cfl=0.5)
     op.discretize()
     op.setup()
-    simu = Simulation(nbIter=2)
-    op.apply(simu)
+    op.apply()
+    op.wait()
 
 
 def test_adapt_2():
@@ -56,13 +56,13 @@ def test_adapt_2():
     The same but with file output
     """
     velo, vorti = init()
-    dt = VariableParameter(data=0.0125, name='dt')
-    op = AdaptTimeStep(velo, vorti, dt_adapt=dt, io_params=True,
+    simu = Simulation(nbIter=2)
+    op = AdaptTimeStep(velo, vorti, simulation=simu, io_params=True,
                        discretization=d3d, lcfl=0.125, cfl=0.5)
     op.discretize()
     op.setup()
-    simu = Simulation(nbIter=2)
-    op.apply(simu)
+    op.apply()
+    op.wait()
     filename = op.io_params.filename
     assert os.path.exists(filename)
 
@@ -72,8 +72,8 @@ def test_adapt_3():
     The same but with external work vector
     """
     velo, vorti = init()
-    dt = VariableParameter(data=0.0125, name='dt')
-    op = AdaptTimeStep(velo, vorti, dt_adapt=dt, io_params=True,
+    simu = Simulation(nbIter=2)
+    op = AdaptTimeStep(velo, vorti, simulation=simu, io_params=True,
                        discretization=d3d, lcfl=0.125, cfl=0.5)
     op.discretize()
     wk_p = op.get_work_properties()
@@ -86,11 +86,47 @@
     op.setup(rwork=rwork)
     simu = Simulation(nbIter=2)
     op.apply(simu)
+    op.wait()
     filename = op.io_params.filename
     assert os.path.exists(filename)
 
 
+def test_adapt_4():
+    """
+    The same but with MPI processes distributed among several tasks
+    """
+    # MPI procs are distributed among two tasks
+    GPU = 4
+    CPU = 1
+    VISU = 12
+    from parmepy.mpi.main_var import main_size
+    proc_tasks = [CPU, ] * main_size
+
+    if main_size > 2:
+        proc_tasks[-1] = GPU
+        proc_tasks[0] = GPU
+        #proc_tasks[1] = VISU
+
+    dom = pp.Box(dimension=3, proc_tasks=proc_tasks)
+    velo = Field(domain=dom, formula=computeVel,
+                 name='Velocity', isVector=True)
+    vorti = Field(domain=dom, formula=computeVort,
+                  name='Vorticity', isVector=True)
+
+    from parmepy.tools.parameters import MPI_params
+    cpu_task = MPI_params(comm=dom.comm_task, task_id=CPU)
+    simu = Simulation(nbIter=2)
+    op = AdaptTimeStep(velo, vorti, simulation=simu, io_params=True,
+                       discretization=d3d, lcfl=0.125, cfl=0.5,
+                       mpi_params=cpu_task)
+    if dom.isOnTask(CPU):
+        op.discretize()
+        op.setup()
+        op.apply()
+        op.wait()
+
 if __name__ == "__main__":
     test_adapt()
     test_adapt_2()
     test_adapt_3()
+    test_adapt_4()
-- 
GitLab