diff --git a/examples/particles_above_salt/particles_above_salt_periodic.py b/examples/particles_above_salt/particles_above_salt_periodic.py
index e1f170ac9bd2e36ebc84e587a0a865f3f2c22924..a22f35ceeb02b590385180f20536b6f1f09a60b0 100644
--- a/examples/particles_above_salt/particles_above_salt_periodic.py
+++ b/examples/particles_above_salt/particles_above_salt_periodic.py
@@ -26,7 +26,7 @@ def delta(Ys, l0):
     Y0 = 1
     for Yi in Ys:
         Y0 = Y0*Yi
-    return 0.02*l0*(np.random.rand(*Y0.shape)-0.5)
+    return 0.1*l0*(np.random.rand(*Y0.shape)-0.5)
 
 def init_concentration(data, coords, l0):
     X = coords[-1]
@@ -58,7 +58,7 @@ def compute(args):
                        PoissonRotational, AdaptiveTimeStep, \
                        Enstrophy, MinMaxFieldStatistics, StrangSplitting, \
                        ParameterPlotter, Integrate, HDF_Writer, \
-                       DirectionalSymbolic
+                       DirectionalSymbolic, ComputeMeanField
 
     from hysop.methods import SpaceDiscretization, Remesh, TimeIntegrator, \
                               ComputeGranularity, Interpolation
@@ -73,7 +73,7 @@ def compute(args):
 
     # Constants
     l0 = 1.5 #initial thickness of the profile
-    (Sc, tau, Vp, Rs, Xo, Xn, N) = (0.70, 25, 0.04, 2.0, (-600,0), (600,750), (1537, 512))
+    (Sc, tau, Vp, Rs, Xo, Xn, N) = (0.70, 25, 0.04, 2.0, (-600,0), (600,750), (1537, 487))
     #(Sc, tau, Vp, Rs, Xo, Xn, N) = (7.00, 25, 0.04, 2.0, (-110,0,0), (65,100,100), (1537, 512, 512))
 
     nu_S = 1.0/Sc
@@ -246,6 +246,17 @@ def compute(args):
                            S: npts,
                            _lambda: npts})
 
+    #> Operator to compute and save mean fields
+    axes = list(range(1, dim))
+    view = [slice(None,None,None),]*dim
+    view[0] = (-200.0,+200.0)
+    view = tuple(view)
+    io_params = IOParams(filename='horizontally_averaged_profiles', frequency=0)
+    compute_mean_fields = ComputeMeanField(name='mean',
+            fields={C: (view, axes), S: (view, axes)},
+            variables={C: npts, S: npts},
+            io_params=io_params)
+
     ### Adaptive timestep operator
     dx = np.min(np.divide(box.length, np.asarray(npts)-1))
     S_dt = 0.5*(dx**2)/nu_S
@@ -285,6 +296,7 @@ def compute(args):
     problem.insert(poisson,
                    splitting,
                    dump_fields,
+                   compute_mean_fields,
                    min_max_U, min_max_W,
                    adapt_dt)
     problem.build()
@@ -353,7 +365,8 @@ if __name__=='__main__':
                      box_origin=(0.0,), box_length=(1.0,),
                      tstart=0.0, tend=500.0,
                      dt=1e-6, cfl=0.5, lcfl=0.125,
-                     dump_freq=10)
+                     dump_times=tuple(float(x) for x in range(500)),
+                     dump_freq=0)
 
     parser.run(compute)
 
diff --git a/hysop/operator/mean_field.py b/hysop/operator/mean_field.py
new file mode 100644
index 0000000000000000000000000000000000000000..7c7756cf12cdeb8ff12d3a22cc4c994e48991435
--- /dev/null
+++ b/hysop/operator/mean_field.py
@@ -0,0 +1,138 @@
+
+"""Mean field operator
+
+.. currentmodule:: hysop.operator.mean_field
+
+* :class:`~ComputeMeanField` : operator to compute and write the mean of fields over given axes
+
+"""
+import os
+from hysop.tools.decorators import debug
+from hysop.tools.types import check_instance, first_not_None, to_tuple, to_list
+from hysop.tools.numpywrappers import npw
+from hysop.tools.io_utils import IO, IOParams
+from hysop.constants import Backend, TranspositionState
+from hysop.core.graph.graph import op_apply
+from hysop.core.graph.computational_graph import ComputationalGraphOperator
+from hysop.topology.cartesian_descriptor import CartesianTopologyDescriptors
+from hysop.fields.continuous_field import Field
+
+
+class ComputeMeanField(ComputationalGraphOperator):
+    """
+    Interface to compute the mean of a field over chosen axes.
+ """ + + @debug + def __init__(self, fields, variables, io_params, + **kwds): + """ + Compute and write the mean of fields in given direction, possiblity on a subview. + + Parameters + ---------- + fields: dict Field: (view, axes) + Keys are the field, values represent underlying view and axes where to take the mean. + View = array_like of slices, Ellipsis, or 2D-tuples. + A tuple represent minimal and maximal physical coordinates. + variables: dict + dictionary of fields as keys and topologies as values. + io_params : :class:`hysop.tools.io_utils.IOParams` + File i/o config (filename, format ...) + Filename is used as a subfolder. + kwds: dict + Base class arguments. + """ + check_instance(fields, dict, keys=Field, values=tuple) + check_instance(variables, dict, keys=Field, values=CartesianTopologyDescriptors) + + input_fields = { field: variables[field] for field in fields.keys() } + super(ComputeMeanField, self).__init__(input_fields=input_fields, io_params=io_params, **kwds) + self.averaged_fields = fields + + @debug + def get_field_requirements(self): + # set good transposition state and no ghosts (because of OPENCL mean) + requirements = super(ComputeMeanField, self).get_field_requirements() + for is_input, ireq in requirements.iter_requirements(): + if (ireq is None): + continue + (field, td, req) = ireq + req.axes = (TranspositionState[field.dim].default_axes(),) + req.min_ghosts=(0,)*field.dim + req.max_ghosts=(0,)*field.dim + return requirements + + def discretize(self): + super(ComputeMeanField, self).discretize() + averaged_dfields = {} + for field, (view, axes) in self.averaged_fields.iteritems(): + dfield = self.input_discrete_fields[field] + + axes = to_tuple(first_not_None(axes, range(dfield.dim))) + assert tuple(set(axes))==axes, axes + check_instance(axes, tuple, values=int, minsize=0, maxsize=field.dim) + + view = first_not_None(view, Ellipsis) + if (view is Ellipsis): + views[field] = Ellipsis + continue + view = to_list(view) + check_instance(view, list, values=(tuple, type(Ellipsis), slice), size=dfield.dim) + for i,vi in enumerate(view): + if isinstance(vi, tuple): + assert len(vi)==2 + Xmin, Xmax = vi + coords = dfield.mesh.local_coords[i] + try: + Imin = npw.where(coords>=Xmin)[0][0] + Imax = npw.where(coords<Xmax)[0][-1] + slc = slice(Imin, Imax) + view[i] = slc + except: + raise + raise ValueError('Wrong physical range.') + check_instance(view, list, values=(type(Ellipsis), slice), size=dfield.dim) + view = tuple(view[i] for i in range(dfield.dim) if (i not in axes)) + check_instance(view, tuple, values=(type(Ellipsis), slice), size=dfield.dim-len(axes)) + averaged_dfields[dfield] = (view, axes) + self.averaged_dfields = averaged_dfields + + def setup(self, work=None): + super(ComputeMeanField, self).setup(work=work) + path = self.io_params.filename + if not os.path.exists(path): + os.makedirs(path) + self.path = path + self.write_counter = 0 + + def filename(self, field, i): + return '{}/{}_{}'.format(self.path, field.name, i) + + @op_apply + def apply(self, simulation, **kwds): + if (simulation is None): + raise ValueError("Missing simulation value for monitoring.") + should_dump = (self.io_params.frequency>0) and (ite % self.io_params.frequency == 0) + should_dump |= simulation.is_time_of_interest + if should_dump: + for (dfield, (view, axes)) in self.averaged_dfields.iteritems(): + filename = self.filename(dfield, self.write_counter) + arrays = {} + for (i,data) in enumerate(dfield.data): + data = data.mean(axis=axes) + data = data[view] + if 
+                        arrays['data'] = data
+                    else:
+                        arrays['data_'+str(i)] = data
+                npw.savez_compressed(file=filename, **arrays)
+            self.write_counter += 1
+
+    @classmethod
+    def supported_backends(cls):
+        return Backend.all
+
diff --git a/hysop/operators.py b/hysop/operators.py
index 6ace017592ff694a21856ff9b96f6a2bcf26e2f0..2e9794dd74f3dd7753216da026e1f78944a6387a 100644
--- a/hysop/operators.py
+++ b/hysop/operators.py
@@ -11,6 +11,7 @@ from hysop.operator.diffusion import Diffusion # FFTW diffusion
 from hysop.operator.redistribute import Redistribute
 
 from hysop.operator.analytic import AnalyticField
+from hysop.operator.mean_field import ComputeMeanField
 from hysop.operator.enstrophy import Enstrophy
 from hysop.operator.kinetic_energy import KineticEnergy
 from hysop.operator.adapt_timestep import AdaptiveTimeStep
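
Post-processing note: ComputeMeanField writes one compressed .npz archive per dump into the folder named by its IOParams filename ('horizontally_averaged_profiles' in the example above), with files named '<field_name>_<write_counter>' and a single 'data' entry for scalar fields. Below is a minimal loading sketch, assuming the profiles folder is reachable from the working directory (the exact location depends on how IOParams resolves relative filenames) and that the averaged scalar fields are registered under the names 'C' and 'S' as in the example script.

    import numpy as np

    def load_mean_profile(field_name, counter, path='horizontally_averaged_profiles'):
        # npw.savez_compressed appends the '.npz' extension to the filename built by
        # ComputeMeanField.filename(); scalar fields are stored under the 'data' key.
        archive = np.load('{}/{}_{}.npz'.format(path, field_name, counter))
        return archive['data']

    # e.g. horizontally averaged concentration profile at the first dump:
    # C_profile = load_mean_profile('C', 0)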