From d5c24e4172fabe5ba06ea77861343c346f16161e Mon Sep 17 00:00:00 2001 From: istasm <mathieu.istas@outlook.com> Date: Mon, 7 Mar 2022 11:44:21 +0100 Subject: [PATCH] allow analysis as a keyword --- .gitignore | 232 ++--- webgeodyn/inout/pygeodyn_hdf5.py | 132 +-- webgeodyn/models/model.py | 1481 +++++++++++++++--------------- webgeodyn/models/models.py | 218 ++--- webgeodyn/obsdata/obsdata.py | 354 +++---- webgeodyn/server/__init__.py | 0 webgeodyn/server/datahandlers.py | 0 webgeodyn/server/server.py | 0 8 files changed, 1214 insertions(+), 1203 deletions(-) mode change 100644 => 100755 webgeodyn/server/__init__.py mode change 100644 => 100755 webgeodyn/server/datahandlers.py mode change 100644 => 100755 webgeodyn/server/server.py diff --git a/.gitignore b/.gitignore index 0c0ee9a..a602e4c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,116 +1,116 @@ -# Ignore Pycharm-generated folder -.idea/ - -# Ignore Pycharm-generated folder -.idea/ - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# rst files -*.rst - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -.static_storage/ -.media/ -local_settings.py - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ - -# Ignore test.py file -webgeodyn_data/test.py +# Ignore Pycharm-generated folder +.idea/ + +# Ignore Pycharm-generated folder +.idea/ + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# rst files +*.rst + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +.static_storage/ +.media/ +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# Ignore test.py file +webgeodyn_data/test.py diff --git a/webgeodyn/inout/pygeodyn_hdf5.py b/webgeodyn/inout/pygeodyn_hdf5.py index 60b1690..6f2e71b 100644 --- a/webgeodyn/inout/pygeodyn_hdf5.py +++ b/webgeodyn/inout/pygeodyn_hdf5.py @@ -1,66 +1,66 @@ -#-*- coding: utf-8 -*- -import glob -import h5py -import os -import numpy as np -from .default import giveMeasureTypeUnits - - -def load(dataDirectory, dataModel, keepRealisations, state_type='analysed'): - """ Loading function for pygeodyn files of hdf5 format. Also adds the data to the dataModel. - - :param dataDirectory: Location of the pygeodyn files - :type dataDirectory: os.path - :param dataModel: Model in which to add the loaded measures - :type dataModel: Model - :param keepRealisations: If True, all realisations are kept in the data. Else, the data is averaged over the realisations - :type keepRealisations: bool - :return: 0 if everything went well, -1 otherwise - :rtype: int - :param state_type: Either forecast, computed or analysed depending on the type of states needed - """ - firstpoint = 3 - assert state_type in ('computed', 'analysed', 'forecast') - - measures_to_load = ['MF', 'SV', 'ER', 'U'] - - hdf5_files = glob.glob(os.path.join(dataDirectory, '*.hdf5')) - if len(hdf5_files) == 0: - raise IOError('No hdf5 file was found in {} !'.format(dataDirectory)) - # Assuming that the file to read is the first one - hdf_filename = hdf5_files[0] - print('Reading:', hdf_filename, ' state_type:', state_type) - - with h5py.File(hdf_filename) as hdf_file: - computed_data = hdf_file[state_type] - - times = np.array(computed_data['times'])[firstpoint:] - - for measureName, measureData in computed_data.items(): - if measureName not in measures_to_load: - continue - else: - measureType, units = giveMeasureTypeUnits(measureName) - - # Move realisation axis to last place to have data of form [ntimes, ncoef, nreal] (originally [nreal, ntimes, ncoef]) - # Remove firstpoints - formattedData = np.moveaxis(measureData, 0, -1)[firstpoint:] - - if measureName == 'MF': - lmax = hdf_file.attrs['Lb'] - elif measureName == 'U': - lmax = hdf_file.attrs['Lu'] - else: - lmax = hdf_file.attrs['Lsv'] - - if keepRealisations: - dataModel.addMeasure(measureName, measureType, lmax, units, - formattedData, times=times) - else: - meanData = formattedData.mean(axis=2) - rmsData = formattedData.std(axis=2) - dataModel.addMeasure(measureName, measureType, lmax, units, - meanData, rmsData, times) - - # Returns 0 if everything went well - return 0 +#-*- coding: utf-8 -*- +import glob +import h5py +import os +import numpy as np +from .default import giveMeasureTypeUnits + + +def load(dataDirectory, dataModel, keepRealisations, 
state_type='analysed'):
+    """ Loading function for pygeodyn files of hdf5 format. Also adds the data to the dataModel.
+
+    :param dataDirectory: Location of the pygeodyn files
+    :type dataDirectory: os.path
+    :param dataModel: Model in which to add the loaded measures
+    :type dataModel: Model
+    :param keepRealisations: If True, all realisations are kept in the data. Else, the data is averaged over the realisations
+    :type keepRealisations: bool
+    :param state_type: Either forecast, computed, analysed or analysis (older files), depending on the type of states needed
+    :type state_type: str
+    :return: 0 if everything went well
+    :rtype: int
+    """
+    firstpoint = 3
+    assert state_type in ('computed', 'analysed', 'analysis', 'forecast')
+
+    measures_to_load = ['MF', 'SV', 'ER', 'U']
+
+    hdf5_files = glob.glob(os.path.join(dataDirectory, '*.hdf5'))
+    if len(hdf5_files) == 0:
+        raise IOError('No hdf5 file was found in {}!'.format(dataDirectory))
+    # Assuming that the file to read is the first one
+    hdf_filename = hdf5_files[0]
+    print('Reading:', hdf_filename, ' state_type:', state_type)
+
+    with h5py.File(hdf_filename, 'r') as hdf_file:
+        computed_data = hdf_file[state_type]
+
+        times = np.array(computed_data['times'])[firstpoint:]
+
+        for measureName, measureData in computed_data.items():
+            if measureName not in measures_to_load:
+                continue
+            else:
+                measureType, units = giveMeasureTypeUnits(measureName)
+
+                # Move realisation axis to last place to have data of form [ntimes, ncoef, nreal] (originally [nreal, ntimes, ncoef])
+                # Remove firstpoints
+                formattedData = np.moveaxis(measureData, 0, -1)[firstpoint:]
+
+                if measureName == 'MF':
+                    lmax = hdf_file.attrs['Lb']
+                elif measureName == 'U':
+                    lmax = hdf_file.attrs['Lu']
+                else:
+                    lmax = hdf_file.attrs['Lsv']
+
+                if keepRealisations:
+                    dataModel.addMeasure(measureName, measureType, lmax, units,
+                                         formattedData, times=times)
+                else:
+                    meanData = formattedData.mean(axis=2)
+                    rmsData = formattedData.std(axis=2)
+                    dataModel.addMeasure(measureName, measureType, lmax, units,
+                                         meanData, rmsData, times)
+
+    # Returns 0 if everything went well
+    return 0
diff --git a/webgeodyn/models/model.py b/webgeodyn/models/model.py
index 2a208ad..d6f3f17 100644
--- a/webgeodyn/models/model.py
+++ b/webgeodyn/models/model.py
@@ -1,735 +1,746 @@
-import numpy as np
-import os
-import json
-import importlib
-
-from webgeodyn.data import GHData, TSData
-from webgeodyn.data.normPnm import semiNormalisedSchmidt, noNorm
-from webgeodyn.constants import rE, rC
-
-ACCELERATION = False
-
-
-class Model:
-    """
-    Class handling models that contain measures data
-    """
-
-    # CONSTRUCTING METHODS
-
-    def __init__(self, dataDirectory=None, dataFormat='default', nth=90, nph=180, normalisation=semiNormalisedSchmidt,
-                 keepRealisations=True, state_type='analysed'):
-        """
-        :param dataDirectory: directory where the data is located
-        :type dataDirectory: str (path)
-        :param dataFormat: format to use for loading the data
-        :type dataFormat: str
-        :param nth: size of the theta grid
-        :type nth: int
-        :param nph: size of the phi grid
-        :type nph: int
-        :param normalisation: normalisation to use for Legendre polynomials
-        :type normalisation: function
-        :param keepRealisations: indicating if realisations should be kept or averaged
-        :type keepRealisations: bool
-        """
-        self.nth = nth
-        self.nph = nph
-        self.normalisation = normalisation
-        self.setGridSize(nth, nph)
-        self.times = None
-
-        if normalisation is None:
-            self.PnmNorm = noNorm
-        else:
-            self.PnmNorm = normalisation
-
-        self.dataDirectory = dataDirectory
-        # Put the dataFormat
to lower for consistency with future checks - self.dataFormat = dataFormat.lower() - self.color = "#000000" - - # Measures that are under the form of Gauss coefficients (Spherical Harmonics) - self.measures = {} - self.measures_rms = {} - # Measures NOT under the form of Gauss coefficients - self.notSH_measures = {} - - if dataDirectory is not None: - self.loadDataFromFile(keepRealisations, state_type=state_type) - - def copy(self): - """ - Returns a copy of current model. - - :return: Copy of the model with same measures, normalisation and grid sizes. - :rtype: Model - """ - returnedModel = Model(nth=self.nth, nph=self.nph, normalisation=self.normalisation) - for measureName in self.measures: - measure = self.measures[measureName] - newcoefs = measure.data.copy() - if self.hasRMSData(measureName): - newcoefs_rms = self.measures_rms[measureName].data.copy() - else: - newcoefs_rms = None - - returnedModel.addMeasure(measureName, measure.measureType, measure.lmax, measure.units, newcoefs, - newcoefs_rms, times=self.times) - return returnedModel - - def hasRMSData(self, measureName): - """ - Checks if the measure exists and if RMS data exists for it. - - :param measureName: name of the measure to probe - :type measureName: str - :return: a boolean indicating if RMS data exists for the measure - :rtype: bool - """ - return (measureName in self.measures_rms) and (self.measures_rms[measureName].data is not None) - - def toMeanRMS(self): - """ - Converts a Model with realisations to a Model contaning only Mean and RMS. - - :return: Model with the same measures but under the form Mean/RMS - :rtype: Model - """ - returnedModel = Model(nth=self.nth, nph=self.nph, normalisation=self.normalisation) - for measureName in self.measures: - measure = self.measures[measureName] - newcoefs = np.mean(measure.data, axis=2) - newcoefs_rms = np.std(measure.data, axis=2) - returnedModel.addMeasure(measureName, measure.measureType, measure.lmax, measure.units, newcoefs, - newcoefs_rms, times=self.times) - - return returnedModel - - def clearCache(self): - """ - Clears the cache of all measures - """ - for measureName in self.measures: - self.measures[measureName].clearCache() - - def setGridSize(self, nth, nph): - """ - Sets the grid theta ]0,pi[ (size: nth+1) and phi grid [0, 2pi] (size: nph). Called in __init__. - - :param nth: size of the theta grid - :type nth: int - :param nph: size of the phi grid - :type nph: int - """ - self.nth = nth - self.nph = nph - self.th_step = np.pi / (nth + 1) - self.ph_step = 2 * np.pi / nph - - self.th = np.arange(np.pi / (nth + 1), np.pi, self.th_step) - self.ph = np.arange(0, 2 * np.pi, self.ph_step) - - def addMeasure(self, measureName, measureType, lmax, units=None, data=None, rmsdata=None, times=None): - """ - Adds a new measure to the model. Also computes secondary measures ('DIVHU' and 'LOD') for 'U'. 
- - :param measureName: Name of the measure - :type measureName: str - :param measureType: Type of the measure - :type measureType: str - :param lmax: Maximal degree - :type lmax: int - :param units: unit of the measure (default: None) - :type units: str - :param data: measure data (default: None, in this case, only creates a times array) - :type data: np.array - :param rmsdata: RMS data (optional: default to None) - :type rmsdata: np.array - :param times: times corresponding of the measure (default:None, in this case do nothing) - :type times: np.array - """ - if times is not None: - if self.times is not None: - if not np.allclose(self.times, times): - raise ValueError("Time array must be all the same for a given modeldata") - self.times = times - if measureType == "U" or (measureType == "DIVHU" or measureType == "dU/dt"): - if data is not None: - self.measures[measureName] = TSData( - data, - int(lmax), - units, - measureType, - self.PnmNorm, - self.th, - self.ph - ) - - # Compute secondary measures only for U - if measureType == "U": - # Compute the divH if not already done and if DivHU was not added - if "DIVHU" not in self.measures: - divh_data = self.computeDivH(self.measures[measureName]) - - if divh_data.shape != data.shape: - raise RuntimeError("Computation of DivHU went wrong for measure={}".format(measureName)) - - # Once the data is built, add the measure DivHU to the model - self.measures["DIVHU"] = TSData( - divh_data, - int(lmax), - "yr-1", - "DIVHU", - self.PnmNorm, - self.th, - self.ph - ) - - if ACCELERATION: - acc_data = self.computeAcceleration() - self.measures["dU/dt"] = TSData( - acc_data, - int(lmax), - "km/yr2", - "dU/dt", - self.PnmNorm, - self.th, - self.ph - ) - - # Compute the LOD if not already done nor added - if "LOD" not in self.notSH_measures: - lod_data = self.computeLOD() - - # Once the data is built, add the measure LOD to the model - self.notSH_measures["LOD"] = lod_data - - if rmsdata is not None: - self.measures_rms[measureName] = TSData( - rmsdata, - int(lmax), - units, - measureType, - self.PnmNorm, - self.th, - self.ph, - ) - else: - if data is not None: - self.measures[measureName] = GHData( - data, - int(lmax), - units, - measureType, - self.PnmNorm, - self.th, - self.ph - ) - if rmsdata is not None: - self.measures_rms[measureName] = GHData( - rmsdata, - int(lmax), - units, - measureType, - self.PnmNorm, - self.th, - self.ph - ) - return - - def loadDataFromFile(self, keepRealisations, state_type='analysed'): - """ - Loads the data from files according to the dataDirectory and dataFormat of the model. Uses dedicated load functions of the inout module. - - :param keepRealisations: indicating if realisations should be kept or averaged - :type keepRealisations: bool - """ - self.measures = {} - self.measures_rms = {} - - # If no specified format, read "dataFormat" file in data directory - if self.dataFormat is None: - if os.path.exists(os.path.join(self.dataDirectory, "dataFormat")): - with open(os.path.join(self.dataDirectory, "dataFormat"), 'r') as f: - self.dataFormat = f.readline().rstrip() - - # If specified data format (or dataFormat file), read via the built-in format from inout.py - if self.dataFormat is not None: - print("Reading model using predefined format", self.dataFormat) - load_fct = importlib.import_module("webgeodyn.inout." 
+ self.dataFormat).load - try: - load_fct(self.dataDirectory, self, keepRealisations, state_type=state_type) - except TypeError: # state_type is a valid argument only for pygeodyn hdf5 simulations for the moment - load_fct(self.dataDirectory, self, keepRealisations) - - # Else raise an error - else: - raise ValueError("dataFormat is not defined.") - - def saveDataToFile(self, dataDirectory, forceOverwrite=False, dataformat="default"): - """ - Saves the data of the model in a designated format. Uses dedicated save functions of the inout module. - - :param dataDirectory: directory where to save the file - :type dataDirectory: str (path) - :param forceOverwrite: indicates if files of the same name should be overwritten - :type forceOverwrite: bool - :param dataformat: dataformat under which the data should be saved (default: "default") - :type dataformat: str - """ - save_fct = importlib.import_module("webgeodyn.inout." + dataformat).save - save_fct(dataDirectory, self, forceOverwrite) - - # GETTERS CALLED BY AJAX REQUESTS - - def getSpherHarmData(self, measureName, l, mlist, removemean, computeallrealisation=False, irealisation=-1): - """ - Extracts the spherical harmonic coefficients of a measure for asked degrees and orders. - - :param measureName: name of the measure - :type measureName: str - :param l: degree for which to extract coefficients - :type l: int - :param mlist: orders for which to extract coefficients - :type mlist: int or list of ints - :param removemean: indicates if the temporal mean should be removed - :type removemean: bool - :param computeallrealisation: not used - :type computeallrealisation: - :param irealisation: not used - :type irealisation: - :return: JSON containing the times ('times') and coefficients of the measure for the asked degrees and orders ('data') - :rtype: dict - """ - jsondata = {"times": self.times.tolist(), "data": {}, "unit": self.measures[measureName].units} - - if mlist is None: - mlist = np.arange(l + 1) # Returns all m for given l - if isinstance(mlist, int): - mlist = [mlist] # Return a single m - - print("ASKING FOR l=", l, "m=", mlist) - - for m in mlist: - jsondata["data"][str(m)] = {} - for coef in self.measures[measureName].coefs_list: - k = self.measures[measureName].lm2k(int(l), int(m), coef) - if k is None: - continue # it has no k indice (e.g. h10) - jsondata["data"][str(m)][coef] = {} - - if self.measures[measureName].has_realisations: - if removemean: - jsondata["data"][str(m)][coef]["values"] = np.mean( - (self.measures[measureName].data[:, k] - self.measures[measureName].coefs_mean[k]), - axis=1).tolist() - else: - jsondata["data"][str(m)][coef]["values"] = np.mean(self.measures[measureName].data[:, k], - axis=1).tolist() - jsondata["data"][str(m)][coef]["rms"] = np.std(self.measures[measureName].data[:, k], - axis=1).tolist() - else: - if removemean: - jsondata["data"][str(m)][coef]["values"] = (self.measures[measureName].data[:, k] - self.measures[measureName].coefs_mean[k]).tolist() - else: - jsondata["data"][str(m)][coef]["values"] = self.measures[measureName].data[:, k].tolist() - - if self.hasRMSData(measureName): - jsondata["data"][str(m)][coef]["rms"] = np.hstack( - self.measures_rms[measureName].data[:, k]).tolist() - # Comment: hstack on rms is for returning an array containing [left,right,left,right,left,...] 
if each rms point is [left,right] - - return jsondata - - def getTimeSerieData(self, measureName, vectorComponent, cutvariable, cutvalue, removemean, coordtype, - computeallrealisation=False, irealisation=-1): - """ - Computes the time evolution of a component (radial or angular) of a measure. - - :param measureName: name of the measure - :type measureName: str - :param vectorComponent: vector component to compute - :type vectorComponent: str - :param cutvariable: variable along which the cut is made - :type cutvariable: str ('phi' or 'theta') - :param cutvalue: value of the cut angle - :type cutvalue: float - :param removemean: indicates if the temporal mean should be removed - :type removemean: bool - :param coordtype: type of the coordinate asked (angle or curvilinear (s_north or s_south)) - :type coordtype: str - :param computeallrealisation: boolean indicating if the computation should be on all realisations or on the mean (default=False) - :type computeallrealisation: bool - :param irealisation: index of the realisation to use (default=-1 and takes the mean instead) - :type irealisation: int - :return: JSON containing the times ('times'), the angles used for the computation ('angles') and the time series of the measure ('data') - :rtype: dict - """ - - jsondata = {"times": self.times.tolist(), "data": {}} - - def get_coords_list(input_th, coord_type): - """ Get data for computation and display from coord_type (angle or curvilinear) """ - if coord_type == "s_north": - display_data = np.linspace(0, 1., len(input_th))[1:-1] - angles_to_compute = np.arcsin(display_data) - elif coord_type == "s_south": - # Compute angles for s = ]-1,0[ to have the correct sampling... - s = np.linspace(-1, 0., len(input_th))[1:-1] - # ...but add np.pi to have the angle between ]pi/2,pi[ (South)... - angles_to_compute = np.arcsin(s) + np.pi - # Recompute y_data in consequence - display_data = np.sin(angles_to_compute) - # Flip both to have the same order as s_north - angles_to_compute = np.flip(angles_to_compute) - display_data = np.flip(display_data) - else: - display_data = (input_th * 180 / np.pi) - angles_to_compute = input_th - return angles_to_compute, display_data.tolist() - - # Separate treatment for geostrophic component - if vectorComponent == 'geos': - th, jsondata['angles'] = get_coords_list(self.th, coordtype) - data = self.measures[measureName].computeUGeostrophic(rC, th, computeallrealisation=computeallrealisation, irealisation=irealisation) - - else: - if cutvalue is None: - raise TypeError('{} is not a valid number'.format(cutvalue)) - cutvalue = cutvalue / 180 * np.pi - if cutvariable == "phi": - ph = [cutvalue] - th, jsondata['angles'] = get_coords_list(self.th, coordtype) - elif cutvariable == "theta": - th = [cutvalue] - ph = self.ph - jsondata["angles"] = (ph * 180 / np.pi).tolist() - else: - print("ERROR, cutvariable should be theta or phi") - return jsondata - - data = self.measures[measureName].computeRThetaPhiData(rC, th, ph, [vectorComponent], - computeallrealisation=computeallrealisation, - irealisation=irealisation)[vectorComponent] - - data = data.reshape((data.shape[0], data.shape[1] * data.shape[2])) - - # Common operations - if removemean: - data = data - np.mean(data, axis=0) - - jsondata["unit"] = self.measures[measureName].units - jsondata["data"] = data.tolist() - - return jsondata - - def getObservatoryData(self, measureName, r, strth, strph): - """ - Computes all components of a measure to compare it with observatory data. 
- - :param measureName: name of the measure - :type measureName: str - :param r: radius where to evaluate the measure components - :type r: float - :param strth: theta where to evaluate the measure components (under string format, in degrees) - :type strth: str - :param strph: phi where to evaluate the measure components (under string format, in degrees) - :type strph: str - :return: JSON containing the times ('times'), the parameters used for the computation ('r', 'th', 'ph), the measure parameters ('unit', 'measuretype') and the computed components ('mean' and 'rms' if present) - :rtype: dict - """ - jsondata = {} - - # Or compute it from this data - jsondata["mean"] = {"times": self.times.tolist()} - if self.measures[measureName].has_realisations: - jsondata["rms"] = {"times": self.times.tolist()} - data = self.measures[measureName].computeRThetaPhiData(float(r), [float(strth) / 180 * np.pi], - [float(strph) / 180 * np.pi], ["r", "th", "ph"], - computeallrealisation=True) - else: - data = self.measures[measureName].computeRThetaPhiData(float(r), [float(strth) / 180 * np.pi], - [float(strph) / 180 * np.pi], ["r", "th", "ph"]) - - for component in data: - if self.measures[measureName].has_realisations: - jsondata["mean"][component] = np.mean(data[component], axis=3).flatten().tolist() - jsondata["rms"][component] = np.std(data[component], axis=3).flatten().tolist() - else: - jsondata["mean"][component] = data[component].flatten().tolist() - - jsondata["r"] = float(r) - jsondata["th"] = float(strth) - jsondata["ph"] = float(strph) - jsondata["unit"] = self.measures[measureName].units - jsondata["measuretype"] = self.measures[measureName].measureType - return jsondata - - def getDataInfo(self): - """ - Gets info on the model (measures, times and color). - - :return: JSON containing the measures type and lmax ('measures'), the times ('times') and the color ('color') of the model. - :rtype: dict - """ - datainfo = { - "measures": - {measureName: { - "type": self.measures[measureName].measureType, - "lmax": self.measures[measureName].lmax, - } for measureName in self.measures}, - "notSH_measures": - {measureName: { - "type": measureName - } for measureName in self.notSH_measures}, - "times": self.times.tolist(), - "color": self.color, - } - - return datainfo - - def getFileInfo(self): - """ - Gets the info on the location and the format of the files used to load the model. - - :return: JSON containing the data directory ('directory'), the format of the files ('format') and if the measures have rms ('rms'). - :rtype: dict - """ - fileinfo = { - "directory": self.dataDirectory, - "format": self.dataFormat, - "rms": {measureName: self.hasRMSData(measureName) for measureName in self.measures} - } - - return fileinfo - - def canGetGlobeData(self, measureName, vectorComponent, r=rC): - """ - Gets the component to compute to compute vectorComponent of a measure. 
- - :param measureName: name of the measure - :type measureName: str - :param vectorComponent: component to compute - :type vectorComponent: str - :param r: radius where the component must be computed (default: constants.rC) - :type r: float - :return: the components to compute and the state of computation - :rtype: tuple - """ - if vectorComponent is None: - # Test whole vector - oneComponentToCompute = False - oneComponentComputing = False - for component in self.measures[measureName].components: - isComponentToCompute, isComponentComputing = self.measures[measureName].isAlreadyComputed(r, component) - # If at least one isComponentToCompute is True, oneComponentToCompute is True - oneComponentToCompute = isComponentToCompute or oneComponentToCompute - # Idem for oneComponentComputing - oneComponentComputing = isComponentComputing or oneComponentComputing - return oneComponentToCompute, oneComponentComputing - else: - isComponentToCompute, isComponentComputing = self.measures[measureName].isAlreadyComputed(r, - vectorComponent) - return isComponentToCompute, isComponentComputing - - def getGlobeDataComputingState(self, measureName): - """ - Gets the computing state of a measure. - - :param measureName: name of the measure to check - :type measureName: str - :return: computing state - :rtype: float - """ - return self.measures[measureName].computingState - - def getGlobeData(self, measureName, vectorComponent, itime, removemean, r=rC): - """ - Computes a component of a measure on the whole globe. - - :param measureName: Name of the measure - :type measureName: str - :param vectorComponent: component to compute - :type vectorComponent: str - :param itime: index of the date at which the computation will be done - :type itime: int - :param removemean: indicates if the temporal mean should be removed - :type removemean: bool - :param r: radius where the component must be computed (default: constants.rC) - :type r: float - :return: JSON containing the parameters of the computation ("theta_step" in degrees, "phi_step" in degrees, "time"), the unit of the measure ("unit") and the data computed on the angular grid ('dataarray'). - :rtype: dict - """ - jsondata = {"theta_step": self.th_step * 180 / np.pi, - "phi_step": self.ph_step * 180 / np.pi, - "time": self.times[itime], - "unit": self.measures[measureName].units} - if vectorComponent is None: - # Return whole vector - data = [] - for component in self.measures[measureName].components: - print(component, "in", self.measures[measureName].components) - componentData = self.measures[measureName].getRThetaPhiGridData(r, itime, removemean, component) - if componentData is not None: - data.append(componentData) - gridData = np.dstack(tuple(componentData for componentData in data)).tolist() - else: - gridData = self.measures[measureName].getRThetaPhiGridData(r, itime, removemean, vectorComponent).tolist() - - jsondata["dataarray"] = gridData - return jsondata - - def getSpectraData(self, measureName, spectraType, spectraDate): - """ - Returns a JSON with the Lowes spectra of the asked type and measure. 
- - :param measureName: name of the measure used of the computation - :type measureName: str - :param spectraType: type of spectra ("spectraofmean", "meanofspectra", "dated" or "deviation") - :type spectraType: str - :param spectraDate: date at which the timed spectrum should be evaluated - :type spectraDate: float - :return: JSON containing the degrees, the Lowes spectrum and the units of the measure - :rtype: dict - """ - measure = self.measures[measureName] - - if spectraType == "dated": - date_index = np.where(self.times == spectraDate) - # If no index was found, raise an error - if len(date_index) == 0: - raise KeyError('{} is not a valid date of model {}'.format(spectraDate, self.dataFormat)) - # Else compute for the first occurrence - else: - itime = date_index[0][0] - else: - itime = 0 - - degrees, lowes_spectrum = measure.computeSpectra(spectraType, spectraDate, itime) - - jsondata = {'degrees': degrees.tolist(), 'data': lowes_spectrum.tolist(), - 'unit': measure.units, 'has_reals': measure.has_realisations} - - return jsondata - - def getLodData(self, removemean): - loddata = self.notSH_measures['LOD'] - - if removemean: - loddata = loddata - loddata.mean(axis=0) - - if loddata.ndim > 1: - mean_lod = loddata.mean(axis=1) - rms_lod = loddata.std(axis=1) - else: - mean_lod = loddata - rms_lod = np.zeros_like(loddata) - - return {'times': self.times.tolist(), 'data': mean_lod.tolist(), 'rmsdata': rms_lod.tolist()} - - # COMPUTATION OF SECONDARY MEASURES - - def computeAcceleration(self): - """ - Computes the acceleration dU/dt by finite differences. The 'U' measure must be loaded. - - :return: NumPy array containing the computed acceleration - :rtype: np.array (same shape as measure U data) - """ - if "U" not in self.measures: - raise ValueError("Cannot compute acceleration: no measure for core flow was found !") - measure_U = self.measures["U"].data - - # Get unique times - unique_times, unique_indexes = np.unique(self.times, return_index=True) - dt = unique_times[1] - unique_times[0] - - acceleration = np.zeros_like(measure_U) - - # Compute acceleration for each unique time... - for i_unique in unique_indexes: - if i_unique == 0: - continue - # dU/dt(t) = U(t) - U(t-dt) / dt - acc = (measure_U[i_unique] - measure_U[i_unique-1])/dt - current_t = self.times[i_unique] - # ...but set it at every index corresponding to the current time - current_indexes = np.where(self.times == current_t)[0] - for i_t in current_indexes: - acceleration[i_t] = acc - - return acceleration - - def computeDivH(self, measure_U): - """ Computes the coefficients of DivH from a flow measure. 
- - :param measure_U: Core flow measure - :type measure_U: TSData - :return: NumPy array containing the computed DivHU - :rtype: np.array (same shape as measure U data) - """ - if not isinstance(measure_U, TSData): - raise ValueError("Cannot compute DivH(U): the given core flow measure has not the right type !") - - # DivH is computed only from poloidal coeffs - # so I build a bool array that is True only for poloidal coeffs - is_s = np.logical_or(measure_U.is_sc, measure_U.is_ss) - L = measure_U.l - - # If U has realisations, expand the dims to be able to broadcast - if measure_U.has_realisations: - is_s = measure_U.expand_dims(is_s) - L = measure_U.expand_dims(L) - - Lplus1 = L + 1 - - # The coeffs of DivH are the poloidal ones of U multiplied by -l(l+1)/rC - # DivH has no toroidal coeffs so these are put to 0 - data_divh = np.where(is_s, -L * Lplus1 * measure_U.data / rC, 0) - - return data_divh - - def computeLOD(self): - """ Computes the length-of-day variation (LOD) from the measure of U. The 'U' measure must be loaded. - - :return: NumPy array containing the computed LOD - :rtype: np.array (times, 1) or (times, 1, coef) - """ - if "U" not in self.measures: - raise ValueError("Cannot compute LOD: no measure for core flow was found !") - measure_U = self.measures["U"].data - - lmax = self.measures["U"].lmax - - # If there are realisations put the coef axis at the end - if self.measures["U"].has_realisations: - measure_U = np.swapaxes(measure_U, 1, 2) - - # Compute LOD with the available data (realisations included if present) - LOD = measure_U[..., 0] - if lmax > 1: - LOD = LOD + 1.776 * measure_U[..., 8] - if lmax > 2: - LOD = LOD + 0.080 * measure_U[..., 24] - if lmax > 3: - LOD = LOD + 0.002 * measure_U[..., 48] - LOD = LOD * 1.232 - - return LOD - - def computeLowes(self, gauss_coefs, measureName, squared=True): - """ - Computes the Lowes spectrum from the gauss coefficients given for the measure. Handles broadcasting as long as the last dimension is the number of gauss coefficients. - Delegates to the computeLowes method of the measure. - - :param gauss_coefs: NumPy array containing the (squared or not) gauss coeffs for every degree. The last dimension must be the number of gauss coefficients. 
-        :type gauss_coefs: np.array (dim: ..., nb_coeffs)
-        :param measureName: name of the measure used for computation
-        :type measureName: str
-        :param squared: indicates if the supplied gauss_coefs are already squared or not
-        :type squared: bool
-        :return: NumPy array containing the degrees, NumPy array containing the Lowes spectra
-        :rtype: np.array(dim: lmax), np.array (dim: ..., lmax)
-        """
-        return self.measures[measureName].computeLowes(gauss_coefs, squared=squared)
+import numpy as np
+import os
+import json
+import importlib
+
+from webgeodyn.data import GHData, TSData
+from webgeodyn.data.normPnm import semiNormalisedSchmidt, noNorm
+from webgeodyn.constants import rE, rC
+
+ACCELERATION = False
+
+
+class Model:
+    """
+    Class handling models that contain measures data
+    """
+
+    # CONSTRUCTING METHODS
+
+    def __init__(self, dataDirectory=None, dataFormat='default', nth=90, nph=180, normalisation=semiNormalisedSchmidt,
+                 keepRealisations=True, state_type='analysed'):
+        """
+        :param dataDirectory: directory where the data is located
+        :type dataDirectory: str (path)
+        :param dataFormat: format to use for loading the data
+        :type dataFormat: str
+        :param nth: size of the theta grid
+        :type nth: int
+        :param nph: size of the phi grid
+        :type nph: int
+        :param normalisation: normalisation to use for Legendre polynomials
+        :type normalisation: function
+        :param keepRealisations: indicates whether realisations should be kept or averaged
+        :type keepRealisations: bool
+        :param state_type: type of states to load ('analysed', 'analysis', 'computed' or 'forecast')
+        :type state_type: str
+        """
+        self.nth = nth
+        self.nph = nph
+        self.normalisation = normalisation
+        self.setGridSize(nth, nph)
+        self.times = None
+
+        if normalisation is None:
+            self.PnmNorm = noNorm
+        else:
+            self.PnmNorm = normalisation
+
+        self.dataDirectory = dataDirectory
+        # Lower-case the dataFormat for consistency with later checks
+        self.dataFormat = dataFormat.lower()
+        self.color = "#000000"
+
+        # Measures that are under the form of Gauss coefficients (Spherical Harmonics)
+        self.measures = {}
+        self.measures_rms = {}
+        # Measures NOT under the form of Gauss coefficients
+        self.notSH_measures = {}
+
+        if dataDirectory is not None:
+            try:
+                self.loadDataFromFile(keepRealisations, state_type=state_type)
+            except KeyError as ke:
+                # Old pygeodyn files use the keyword 'analysis' where newer ones
+                # use 'analysed': swap the two spellings and try once more.
+                if state_type == 'analysis':
+                    state_type = 'analysed'
+                elif state_type == 'analysed':
+                    state_type = 'analysis'
+                else:
+                    raise ke
+
+                self.loadDataFromFile(keepRealisations, state_type=state_type)
+
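+    # A minimal usage sketch (the run directory below is a placeholder): thanks to
+    # the retry above, either spelling of the state keyword loads the same run,
+    #
+    #     Model('/path/to/run', dataFormat='pygeodyn_hdf5', state_type='analysed')
+    #     Model('/path/to/run', dataFormat='pygeodyn_hdf5', state_type='analysis')
+    #
+    # whichever group name the HDF5 file actually contains.
+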
+    def copy(self):
+        """
+        Returns a copy of the current model.
+
+        :return: Copy of the model with same measures, normalisation and grid sizes.
+        :rtype: Model
+        """
+        returnedModel = Model(nth=self.nth, nph=self.nph, normalisation=self.normalisation)
+        for measureName in self.measures:
+            measure = self.measures[measureName]
+            newcoefs = measure.data.copy()
+            if self.hasRMSData(measureName):
+                newcoefs_rms = self.measures_rms[measureName].data.copy()
+            else:
+                newcoefs_rms = None
+
+            returnedModel.addMeasure(measureName, measure.measureType, measure.lmax, measure.units, newcoefs,
+                                     newcoefs_rms, times=self.times)
+        return returnedModel
+
+    def hasRMSData(self, measureName):
+        """
+        Checks if the measure exists and if RMS data exists for it.
+
+        :param measureName: name of the measure to probe
+        :type measureName: str
+        :return: a boolean indicating if RMS data exists for the measure
+        :rtype: bool
+        """
+        return (measureName in self.measures_rms) and (self.measures_rms[measureName].data is not None)
+
+    def toMeanRMS(self):
+        """
+        Converts a Model with realisations to a Model containing only Mean and RMS.
+
+        :return: Model with the same measures but under the form Mean/RMS
+        :rtype: Model
+        """
+        returnedModel = Model(nth=self.nth, nph=self.nph, normalisation=self.normalisation)
+        for measureName in self.measures:
+            measure = self.measures[measureName]
+            newcoefs = np.mean(measure.data, axis=2)
+            newcoefs_rms = np.std(measure.data, axis=2)
+            returnedModel.addMeasure(measureName, measure.measureType, measure.lmax, measure.units, newcoefs,
+                                     newcoefs_rms, times=self.times)
+
+        return returnedModel
+
+    def clearCache(self):
+        """
+        Clears the cache of all measures
+        """
+        for measureName in self.measures:
+            self.measures[measureName].clearCache()
+
+    def setGridSize(self, nth, nph):
+        """
+        Sets the grid theta ]0,pi[ (size: nth+1) and phi grid [0, 2pi] (size: nph). Called in __init__.
+
+        :param nth: size of the theta grid
+        :type nth: int
+        :param nph: size of the phi grid
+        :type nph: int
+        """
+        self.nth = nth
+        self.nph = nph
+        self.th_step = np.pi / (nth + 1)
+        self.ph_step = 2 * np.pi / nph
+
+        self.th = np.arange(np.pi / (nth + 1), np.pi, self.th_step)
+        self.ph = np.arange(0, 2 * np.pi, self.ph_step)
+
+    def addMeasure(self, measureName, measureType, lmax, units=None, data=None, rmsdata=None, times=None):
+        """
+        Adds a new measure to the model. Also computes secondary measures ('DIVHU' and 'LOD') for 'U'.
+
+        :param measureName: Name of the measure
+        :type measureName: str
+        :param measureType: Type of the measure
+        :type measureType: str
+        :param lmax: Maximal degree
+        :type lmax: int
+        :param units: unit of the measure (default: None)
+        :type units: str
+        :param data: measure data (default: None, in this case, only creates a times array)
+        :type data: np.array
+        :param rmsdata: RMS data (optional: default to None)
+        :type rmsdata: np.array
+        :param times: times corresponding to the measure (default: None, in this case do nothing)
+        :type times: np.array
+        """
+        if times is not None:
+            if self.times is not None:
+                if not np.allclose(self.times, times):
+                    raise ValueError("Time arrays must all be the same for a given model")
+            self.times = times
+        if measureType in ("U", "DIVHU", "dU/dt"):
+            if data is not None:
+                self.measures[measureName] = TSData(
+                    data,
+                    int(lmax),
+                    units,
+                    measureType,
+                    self.PnmNorm,
+                    self.th,
+                    self.ph
+                )
+
+                # Compute secondary measures only for U
+                if measureType == "U":
+                    # Compute the divH if not already done and if DivHU was not added
+                    if "DIVHU" not in self.measures:
+                        divh_data = self.computeDivH(self.measures[measureName])
+
+                        if divh_data.shape != data.shape:
+                            raise RuntimeError("Computation of DivHU went wrong for measure={}".format(measureName))
+
+                        # Once the data is built, add the measure DivHU to the model
+                        self.measures["DIVHU"] = TSData(
+                            divh_data,
+                            int(lmax),
+                            "yr-1",
+                            "DIVHU",
+                            self.PnmNorm,
+                            self.th,
+                            self.ph
+                        )
+
+                    if ACCELERATION:
+                        acc_data = self.computeAcceleration()
+                        self.measures["dU/dt"] = TSData(
+                            acc_data,
+                            int(lmax),
+                            "km/yr2",
+                            "dU/dt",
+                            self.PnmNorm,
+                            self.th,
+                            self.ph
+                        )
+
+                    # Compute the LOD if not already computed or added
+                    if "LOD" not in self.notSH_measures:
+                        lod_data = self.computeLOD()
+
+                        # Once
the data is built, add the measure LOD to the model + self.notSH_measures["LOD"] = lod_data + + if rmsdata is not None: + self.measures_rms[measureName] = TSData( + rmsdata, + int(lmax), + units, + measureType, + self.PnmNorm, + self.th, + self.ph, + ) + else: + if data is not None: + self.measures[measureName] = GHData( + data, + int(lmax), + units, + measureType, + self.PnmNorm, + self.th, + self.ph + ) + if rmsdata is not None: + self.measures_rms[measureName] = GHData( + rmsdata, + int(lmax), + units, + measureType, + self.PnmNorm, + self.th, + self.ph + ) + return + + def loadDataFromFile(self, keepRealisations, state_type='analysed'): + """ + Loads the data from files according to the dataDirectory and dataFormat of the model. Uses dedicated load functions of the inout module. + + :param keepRealisations: indicating if realisations should be kept or averaged + :type keepRealisations: bool + """ + self.measures = {} + self.measures_rms = {} + + # If no specified format, read "dataFormat" file in data directory + if self.dataFormat is None: + if os.path.exists(os.path.join(self.dataDirectory, "dataFormat")): + with open(os.path.join(self.dataDirectory, "dataFormat"), 'r') as f: + self.dataFormat = f.readline().rstrip() + + # If specified data format (or dataFormat file), read via the built-in format from inout.py + if self.dataFormat is not None: + print("Reading model using predefined format", self.dataFormat) + load_fct = importlib.import_module("webgeodyn.inout." + self.dataFormat).load + try: + load_fct(self.dataDirectory, self, keepRealisations, state_type=state_type) + except TypeError: # state_type is a valid argument only for pygeodyn hdf5 simulations for the moment + load_fct(self.dataDirectory, self, keepRealisations) + + # Else raise an error + else: + raise ValueError("dataFormat is not defined.") + + def saveDataToFile(self, dataDirectory, forceOverwrite=False, dataformat="default"): + """ + Saves the data of the model in a designated format. Uses dedicated save functions of the inout module. + + :param dataDirectory: directory where to save the file + :type dataDirectory: str (path) + :param forceOverwrite: indicates if files of the same name should be overwritten + :type forceOverwrite: bool + :param dataformat: dataformat under which the data should be saved (default: "default") + :type dataformat: str + """ + save_fct = importlib.import_module("webgeodyn.inout." + dataformat).save + save_fct(dataDirectory, self, forceOverwrite) + + # GETTERS CALLED BY AJAX REQUESTS + + def getSpherHarmData(self, measureName, l, mlist, removemean, computeallrealisation=False, irealisation=-1): + """ + Extracts the spherical harmonic coefficients of a measure for asked degrees and orders. 
+ + :param measureName: name of the measure + :type measureName: str + :param l: degree for which to extract coefficients + :type l: int + :param mlist: orders for which to extract coefficients + :type mlist: int or list of ints + :param removemean: indicates if the temporal mean should be removed + :type removemean: bool + :param computeallrealisation: not used + :type computeallrealisation: + :param irealisation: not used + :type irealisation: + :return: JSON containing the times ('times') and coefficients of the measure for the asked degrees and orders ('data') + :rtype: dict + """ + jsondata = {"times": self.times.tolist(), "data": {}, "unit": self.measures[measureName].units} + + if mlist is None: + mlist = np.arange(l + 1) # Returns all m for given l + if isinstance(mlist, int): + mlist = [mlist] # Return a single m + + print("ASKING FOR l=", l, "m=", mlist) + + for m in mlist: + jsondata["data"][str(m)] = {} + for coef in self.measures[measureName].coefs_list: + k = self.measures[measureName].lm2k(int(l), int(m), coef) + if k is None: + continue # it has no k indice (e.g. h10) + jsondata["data"][str(m)][coef] = {} + + if self.measures[measureName].has_realisations: + if removemean: + jsondata["data"][str(m)][coef]["values"] = np.mean( + (self.measures[measureName].data[:, k] - self.measures[measureName].coefs_mean[k]), + axis=1).tolist() + else: + jsondata["data"][str(m)][coef]["values"] = np.mean(self.measures[measureName].data[:, k], + axis=1).tolist() + jsondata["data"][str(m)][coef]["rms"] = np.std(self.measures[measureName].data[:, k], + axis=1).tolist() + else: + if removemean: + jsondata["data"][str(m)][coef]["values"] = (self.measures[measureName].data[:, k] - self.measures[measureName].coefs_mean[k]).tolist() + else: + jsondata["data"][str(m)][coef]["values"] = self.measures[measureName].data[:, k].tolist() + + if self.hasRMSData(measureName): + jsondata["data"][str(m)][coef]["rms"] = np.hstack( + self.measures_rms[measureName].data[:, k]).tolist() + # Comment: hstack on rms is for returning an array containing [left,right,left,right,left,...] if each rms point is [left,right] + + return jsondata + + def getTimeSerieData(self, measureName, vectorComponent, cutvariable, cutvalue, removemean, coordtype, + computeallrealisation=False, irealisation=-1): + """ + Computes the time evolution of a component (radial or angular) of a measure. 
+ + :param measureName: name of the measure + :type measureName: str + :param vectorComponent: vector component to compute + :type vectorComponent: str + :param cutvariable: variable along which the cut is made + :type cutvariable: str ('phi' or 'theta') + :param cutvalue: value of the cut angle + :type cutvalue: float + :param removemean: indicates if the temporal mean should be removed + :type removemean: bool + :param coordtype: type of the coordinate asked (angle or curvilinear (s_north or s_south)) + :type coordtype: str + :param computeallrealisation: boolean indicating if the computation should be on all realisations or on the mean (default=False) + :type computeallrealisation: bool + :param irealisation: index of the realisation to use (default=-1 and takes the mean instead) + :type irealisation: int + :return: JSON containing the times ('times'), the angles used for the computation ('angles') and the time series of the measure ('data') + :rtype: dict + """ + + jsondata = {"times": self.times.tolist(), "data": {}} + + def get_coords_list(input_th, coord_type): + """ Get data for computation and display from coord_type (angle or curvilinear) """ + if coord_type == "s_north": + display_data = np.linspace(0, 1., len(input_th))[1:-1] + angles_to_compute = np.arcsin(display_data) + elif coord_type == "s_south": + # Compute angles for s = ]-1,0[ to have the correct sampling... + s = np.linspace(-1, 0., len(input_th))[1:-1] + # ...but add np.pi to have the angle between ]pi/2,pi[ (South)... + angles_to_compute = np.arcsin(s) + np.pi + # Recompute y_data in consequence + display_data = np.sin(angles_to_compute) + # Flip both to have the same order as s_north + angles_to_compute = np.flip(angles_to_compute) + display_data = np.flip(display_data) + else: + display_data = (input_th * 180 / np.pi) + angles_to_compute = input_th + return angles_to_compute, display_data.tolist() + + # Separate treatment for geostrophic component + if vectorComponent == 'geos': + th, jsondata['angles'] = get_coords_list(self.th, coordtype) + data = self.measures[measureName].computeUGeostrophic(rC, th, computeallrealisation=computeallrealisation, irealisation=irealisation) + + else: + if cutvalue is None: + raise TypeError('{} is not a valid number'.format(cutvalue)) + cutvalue = cutvalue / 180 * np.pi + if cutvariable == "phi": + ph = [cutvalue] + th, jsondata['angles'] = get_coords_list(self.th, coordtype) + elif cutvariable == "theta": + th = [cutvalue] + ph = self.ph + jsondata["angles"] = (ph * 180 / np.pi).tolist() + else: + print("ERROR, cutvariable should be theta or phi") + return jsondata + + data = self.measures[measureName].computeRThetaPhiData(rC, th, ph, [vectorComponent], + computeallrealisation=computeallrealisation, + irealisation=irealisation)[vectorComponent] + + data = data.reshape((data.shape[0], data.shape[1] * data.shape[2])) + + # Common operations + if removemean: + data = data - np.mean(data, axis=0) + + jsondata["unit"] = self.measures[measureName].units + jsondata["data"] = data.tolist() + + return jsondata + + def getObservatoryData(self, measureName, r, strth, strph): + """ + Computes all components of a measure to compare it with observatory data. 
+ + :param measureName: name of the measure + :type measureName: str + :param r: radius where to evaluate the measure components + :type r: float + :param strth: theta where to evaluate the measure components (under string format, in degrees) + :type strth: str + :param strph: phi where to evaluate the measure components (under string format, in degrees) + :type strph: str + :return: JSON containing the times ('times'), the parameters used for the computation ('r', 'th', 'ph), the measure parameters ('unit', 'measuretype') and the computed components ('mean' and 'rms' if present) + :rtype: dict + """ + jsondata = {} + + # Or compute it from this data + jsondata["mean"] = {"times": self.times.tolist()} + if self.measures[measureName].has_realisations: + jsondata["rms"] = {"times": self.times.tolist()} + data = self.measures[measureName].computeRThetaPhiData(float(r), [float(strth) / 180 * np.pi], + [float(strph) / 180 * np.pi], ["r", "th", "ph"], + computeallrealisation=True) + else: + data = self.measures[measureName].computeRThetaPhiData(float(r), [float(strth) / 180 * np.pi], + [float(strph) / 180 * np.pi], ["r", "th", "ph"]) + + for component in data: + if self.measures[measureName].has_realisations: + jsondata["mean"][component] = np.mean(data[component], axis=3).flatten().tolist() + jsondata["rms"][component] = np.std(data[component], axis=3).flatten().tolist() + else: + jsondata["mean"][component] = data[component].flatten().tolist() + + jsondata["r"] = float(r) + jsondata["th"] = float(strth) + jsondata["ph"] = float(strph) + jsondata["unit"] = self.measures[measureName].units + jsondata["measuretype"] = self.measures[measureName].measureType + return jsondata + + def getDataInfo(self): + """ + Gets info on the model (measures, times and color). + + :return: JSON containing the measures type and lmax ('measures'), the times ('times') and the color ('color') of the model. + :rtype: dict + """ + datainfo = { + "measures": + {measureName: { + "type": self.measures[measureName].measureType, + "lmax": self.measures[measureName].lmax, + } for measureName in self.measures}, + "notSH_measures": + {measureName: { + "type": measureName + } for measureName in self.notSH_measures}, + "times": self.times.tolist(), + "color": self.color, + } + + return datainfo + + def getFileInfo(self): + """ + Gets the info on the location and the format of the files used to load the model. + + :return: JSON containing the data directory ('directory'), the format of the files ('format') and if the measures have rms ('rms'). + :rtype: dict + """ + fileinfo = { + "directory": self.dataDirectory, + "format": self.dataFormat, + "rms": {measureName: self.hasRMSData(measureName) for measureName in self.measures} + } + + return fileinfo + + def canGetGlobeData(self, measureName, vectorComponent, r=rC): + """ + Gets the component to compute to compute vectorComponent of a measure. 
+ + :param measureName: name of the measure + :type measureName: str + :param vectorComponent: component to compute + :type vectorComponent: str + :param r: radius where the component must be computed (default: constants.rC) + :type r: float + :return: the components to compute and the state of computation + :rtype: tuple + """ + if vectorComponent is None: + # Test whole vector + oneComponentToCompute = False + oneComponentComputing = False + for component in self.measures[measureName].components: + isComponentToCompute, isComponentComputing = self.measures[measureName].isAlreadyComputed(r, component) + # If at least one isComponentToCompute is True, oneComponentToCompute is True + oneComponentToCompute = isComponentToCompute or oneComponentToCompute + # Idem for oneComponentComputing + oneComponentComputing = isComponentComputing or oneComponentComputing + return oneComponentToCompute, oneComponentComputing + else: + isComponentToCompute, isComponentComputing = self.measures[measureName].isAlreadyComputed(r, + vectorComponent) + return isComponentToCompute, isComponentComputing + + def getGlobeDataComputingState(self, measureName): + """ + Gets the computing state of a measure. + + :param measureName: name of the measure to check + :type measureName: str + :return: computing state + :rtype: float + """ + return self.measures[measureName].computingState + + def getGlobeData(self, measureName, vectorComponent, itime, removemean, r=rC): + """ + Computes a component of a measure on the whole globe. + + :param measureName: Name of the measure + :type measureName: str + :param vectorComponent: component to compute + :type vectorComponent: str + :param itime: index of the date at which the computation will be done + :type itime: int + :param removemean: indicates if the temporal mean should be removed + :type removemean: bool + :param r: radius where the component must be computed (default: constants.rC) + :type r: float + :return: JSON containing the parameters of the computation ("theta_step" in degrees, "phi_step" in degrees, "time"), the unit of the measure ("unit") and the data computed on the angular grid ('dataarray'). + :rtype: dict + """ + jsondata = {"theta_step": self.th_step * 180 / np.pi, + "phi_step": self.ph_step * 180 / np.pi, + "time": self.times[itime], + "unit": self.measures[measureName].units} + if vectorComponent is None: + # Return whole vector + data = [] + for component in self.measures[measureName].components: + print(component, "in", self.measures[measureName].components) + componentData = self.measures[measureName].getRThetaPhiGridData(r, itime, removemean, component) + if componentData is not None: + data.append(componentData) + gridData = np.dstack(tuple(componentData for componentData in data)).tolist() + else: + gridData = self.measures[measureName].getRThetaPhiGridData(r, itime, removemean, vectorComponent).tolist() + + jsondata["dataarray"] = gridData + return jsondata + + def getSpectraData(self, measureName, spectraType, spectraDate): + """ + Returns a JSON with the Lowes spectra of the asked type and measure. 
+
+        :param measureName: name of the measure used for the computation
+        :type measureName: str
+        :param spectraType: type of spectra ("spectraofmean", "meanofspectra", "dated" or "deviation")
+        :type spectraType: str
+        :param spectraDate: date at which the timed spectrum should be evaluated
+        :type spectraDate: float
+        :return: JSON containing the degrees, the Lowes spectrum and the units of the measure
+        :rtype: dict
+        """
+        measure = self.measures[measureName]
+
+        if spectraType == "dated":
+            date_index = np.where(self.times == spectraDate)
+            # If no index was found, raise an error (np.where returns a tuple of index arrays)
+            if len(date_index[0]) == 0:
+                raise KeyError('{} is not a valid date of model {}'.format(spectraDate, self.dataFormat))
+            # Else compute for the first occurrence
+            else:
+                itime = date_index[0][0]
+        else:
+            itime = 0
+
+        degrees, lowes_spectrum = measure.computeSpectra(spectraType, spectraDate, itime)
+
+        jsondata = {'degrees': degrees.tolist(), 'data': lowes_spectrum.tolist(),
+                    'unit': measure.units, 'has_reals': measure.has_realisations}
+
+        return jsondata
+
+    def getLodData(self, removemean):
+        """
+        Returns the length-of-day (LOD) time series computed from the core flow.
+
+        :param removemean: indicates if the temporal mean should be removed
+        :type removemean: bool
+        :return: JSON containing the times ('times'), the mean LOD ('data') and its spread ('rmsdata')
+        :rtype: dict
+        """
+        loddata = self.notSH_measures['LOD']
+
+        if removemean:
+            loddata = loddata - loddata.mean(axis=0)
+
+        if loddata.ndim > 1:
+            mean_lod = loddata.mean(axis=1)
+            rms_lod = loddata.std(axis=1)
+        else:
+            mean_lod = loddata
+            rms_lod = np.zeros_like(loddata)
+
+        return {'times': self.times.tolist(), 'data': mean_lod.tolist(), 'rmsdata': rms_lod.tolist()}
+
+    # COMPUTATION OF SECONDARY MEASURES
+
+    def computeAcceleration(self):
+        """
+        Computes the acceleration dU/dt by finite differences. The 'U' measure must be loaded.
+
+        :return: NumPy array containing the computed acceleration
+        :rtype: np.array (same shape as measure U data)
+        """
+        if "U" not in self.measures:
+            raise ValueError("Cannot compute acceleration: no measure for core flow was found!")
+        measure_U = self.measures["U"].data
+
+        # Get unique times (a uniform time step between unique epochs is assumed)
+        unique_times, unique_indexes = np.unique(self.times, return_index=True)
+        dt = unique_times[1] - unique_times[0]
+
+        acceleration = np.zeros_like(measure_U)
+
+        # Compute acceleration for each unique time...
+        for i_unique in unique_indexes:
+            if i_unique == 0:
+                continue
+            # dU/dt(t) = (U(t) - U(t-dt)) / dt
+            acc = (measure_U[i_unique] - measure_U[i_unique-1])/dt
+            current_t = self.times[i_unique]
+            # ...but set it at every index corresponding to the current time
+            current_indexes = np.where(self.times == current_t)[0]
+            for i_t in current_indexes:
+                acceleration[i_t] = acc
+
+        return acceleration
+
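+    # Sketch of the horizontal-divergence relation used in computeDivH below,
+    # where s_lm denotes a poloidal flow coefficient and rC the core radius
+    # from webgeodyn.constants:
+    #
+    #     divH(U)_lm = -l * (l + 1) * s_lm / rC
+    #
+    # e.g. for l=2 every poloidal coefficient is scaled by -6/rC, while
+    # toroidal coefficients carry no horizontal divergence and map to 0.
+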
+
+        :param measure_U: core flow measure
+        :type measure_U: TSData
+        :return: NumPy array containing the computed DivHU
+        :rtype: np.array (same shape as measure U data)
+        """
+        if not isinstance(measure_U, TSData):
+            raise ValueError("Cannot compute DivH(U): the given core flow measure does not have the right type!")
+
+        # DivH is computed only from poloidal coeffs,
+        # so build a boolean array that is True only for poloidal coeffs
+        is_s = np.logical_or(measure_U.is_sc, measure_U.is_ss)
+        L = measure_U.l
+
+        # If U has realisations, expand the dims to be able to broadcast
+        if measure_U.has_realisations:
+            is_s = measure_U.expand_dims(is_s)
+            L = measure_U.expand_dims(L)
+
+        Lplus1 = L + 1
+
+        # The coeffs of DivH are the poloidal ones of U multiplied by -l(l+1)/rC;
+        # DivH has no toroidal coeffs so these are set to 0
+        data_divh = np.where(is_s, -L * Lplus1 * measure_U.data / rC, 0)
+
+        return data_divh
+
+    def computeLOD(self):
+        """ Computes the length-of-day variation (LOD) from the measure of U. The 'U' measure must be loaded.
+
+        :return: NumPy array containing the computed LOD
+        :rtype: np.array (times, 1) or (times, 1, coef)
+        """
+        if "U" not in self.measures:
+            raise ValueError("Cannot compute LOD: no measure for core flow was found!")
+        measure_U = self.measures["U"].data
+
+        lmax = self.measures["U"].lmax
+
+        # If there are realisations, put the coef axis at the end
+        if self.measures["U"].has_realisations:
+            measure_U = np.swapaxes(measure_U, 1, 2)
+
+        # Compute LOD with the available data (realisations included if present)
+        LOD = measure_U[..., 0]
+        if lmax > 1:
+            LOD = LOD + 1.776 * measure_U[..., 8]
+        if lmax > 2:
+            LOD = LOD + 0.080 * measure_U[..., 24]
+        if lmax > 3:
+            LOD = LOD + 0.002 * measure_U[..., 48]
+        LOD = LOD * 1.232
+
+        return LOD
+
+    def computeLowes(self, gauss_coefs, measureName, squared=True):
+        """
+        Computes the Lowes spectrum from the Gauss coefficients given for the measure. Handles broadcasting as long as the last dimension is the number of Gauss coefficients.
+        Delegates to the computeLowes method of the measure.
+
+        :param gauss_coefs: NumPy array containing the Gauss coefficients (squared or not) for every degree. The last dimension must be the number of Gauss coefficients.
+ :type gauss_coefs: np.array (dim: ..., nb_coeffs) + :param measureName: name of the measure used for computation + :type measureName: str + :param squared: indicates if the supplied gauss_coefs are already squared or not + :type squared: bool + :return: NumPy array containing the degrees, NumPy array containing the Lowes spectra + :rtype: np.array(dim: lmax), np.array (dim: ..., lmax) + """ + return self.measures[measureName].computeLowes(gauss_coefs, squared=squared) diff --git a/webgeodyn/models/models.py b/webgeodyn/models/models.py index 8fc32c2..cd73c14 100644 --- a/webgeodyn/models/models.py +++ b/webgeodyn/models/models.py @@ -1,109 +1,109 @@ -import os.path -from itertools import cycle -from .model import Model - - -class Models(dict): - """ - Dictionnary containing the loaded models - """ - def __init__(self): - super().__init__() - # ui_colors are taken so that consecutive colors are visually very different (10-class Paired on http://colorbrewer2.org) - self.ui_colors = cycle(['#1f78b4', '#33a02c', '#e31a1c', '#ff7f00', '#6a3d9a', '#a6cee3', '#b2df8a', '#fb9a99', '#fdbf6f', '#cab2d6']) - self.model_colors = {"archeomag": "#ffaaaa", - "ced": "#ff0000", - "chaos": "#4daf4a", - "covobs": "#984ea3", - "enscore": "#af694a", - "pygeodyn_asc": "#ff00ff", - "pygeodyn_hdf5": "#ff00ff", - "midpath": "#00ff00", - "nath": "#0000ff", - "s1fs": "#aaaaaa", - "s1fs2": "#aaaaaa", - "zforecast": "#e41a1c"} - - def addModel(self, modelName, model, color=None): - """ - Add a model to the list of loaded model. - - :param modelName: name of the model - :type modelName: str - :param model: Model to add - :type model: Model - :param color: color to use for plots of the model (default: None, in this case, uses a dedicated color for each model) - :type color: str (hexadecimal form) - """ - if color is None: - try: - color = self.model_colors[model.dataFormat] - except KeyError: - # Use ui_colors if this fails - color = 'ui' - - if color == 'ui': - color = next(self.ui_colors) - - # Checking hexadecimal form of the color - if len(color) != 7 or not color.startswith('#'): - raise ValueError('Color of {} should be given in hexadecimal form ! Got "{}" instead.'.format(modelName, color)) - - model.color = color - self[modelName] = model - - def loadModel(self, dataDirectory, modelName=None, dataFormat='default', color=None, keepRealisations=True, state_type='analysed'): - """ - Creates a Model by loading the files in dataDirectory using the dataFormat and the calls addModel. Note that it sets first the allocated space in the dict to False in case of asynchronous thread checking the loading state. - - :param dataDirectory: Directory where the model files are located - :type dataDirectory: str (path) - :param modelName: name of the model to use. If None, the basename of the dataDirectory will be used. - :type modelName: str or None - :param dataFormat: format to use to load the model data. Default is 'default'. - :type dataFormat: str - :param color: color of the model to use for plots (in hex form) - :type color: str - :param keepRealisations: indicates with realisations should be kept or averaged - :type keepRealisations: bool - """ - if modelName is None: - modelName = os.path.basename(dataDirectory) - - self[modelName] = False - self.addModel(modelName, Model(dataDirectory, dataFormat, keepRealisations=keepRealisations, state_type=state_type), color) - - def isModelLoading(self, modelName): - """ - Checks if the model is present in Models and if it is being loaded. 
-
-        :param modelName: name of the model to check
-        :type modelName: str
-        :return: the state of loading of the model (True means loading)
-        :rtype: bool
-        """
-        return (modelName in self) and (self[modelName] is False)
-
-    def getModelList(self):
-        """
-        Gets the list of model names.
-
-        :return: list of model names
-        :rtype: list
-        """
-        return list(self.keys())
-
-    def isCached(self, modelName):
-        """
-        Checks if a model is present and loaded.
-
-        :param modelName: name of the model to check
-        :type modelName: str
-        :return: the presence of the model in Models (True means present and loaded)
-        :rtype: bool
-        """
-        if modelName not in self:
-            return False
-        if self[modelName] is False:
-            return False
-        return True
+import os.path
+from itertools import cycle
+from .model import Model
+
+
+class Models(dict):
+    """
+    Dictionary containing the loaded models
+    """
+    def __init__(self):
+        super().__init__()
+        # ui_colors are taken so that consecutive colors are visually very different (10-class Paired on http://colorbrewer2.org)
+        self.ui_colors = cycle(['#1f78b4', '#33a02c', '#e31a1c', '#ff7f00', '#6a3d9a', '#a6cee3', '#b2df8a', '#fb9a99', '#fdbf6f', '#cab2d6'])
+        self.model_colors = {"archeomag": "#ffaaaa",
+                             "ced": "#ff0000",
+                             "chaos": "#4daf4a",
+                             "covobs": "#984ea3",
+                             "enscore": "#af694a",
+                             "pygeodyn_asc": "#ff00ff",
+                             "pygeodyn_hdf5": "#ff00ff",
+                             "midpath": "#00ff00",
+                             "nath": "#0000ff",
+                             "s1fs": "#aaaaaa",
+                             "s1fs2": "#aaaaaa",
+                             "zforecast": "#e41a1c"}
+
+    def addModel(self, modelName, model, color=None):
+        """
+        Adds a model to the list of loaded models.
+
+        :param modelName: name of the model
+        :type modelName: str
+        :param model: Model to add
+        :type model: Model
+        :param color: color to use for plots of the model (default: None, in this case, uses a dedicated color for each model)
+        :type color: str (hexadecimal form)
+        """
+        if color is None:
+            try:
+                color = self.model_colors[model.dataFormat]
+            except KeyError:
+                # Use ui_colors if this fails
+                color = 'ui'
+
+        if color == 'ui':
+            color = next(self.ui_colors)
+
+        # Checking hexadecimal form of the color
+        if len(color) != 7 or not color.startswith('#'):
+            raise ValueError('Color of {} should be given in hexadecimal form! Got "{}" instead.'.format(modelName, color))
+
+        model.color = color
+        self[modelName] = model
+
+    def loadModel(self, dataDirectory, modelName=None, dataFormat='default', color=None, keepRealisations=True, state_type='analysed'):
+        """
+        Creates a Model by loading the files in dataDirectory using the dataFormat and then calls addModel. Note that it first sets the allocated space in the dict to False so that an asynchronous thread can check the loading state.
+
+        :param dataDirectory: Directory where the model files are located
+        :type dataDirectory: str (path)
+        :param modelName: name of the model to use. If None, the basename of the dataDirectory will be used.
+        :type modelName: str or None
+        :param dataFormat: format to use to load the model data. Default is 'default'.
+        :type dataFormat: str
+        :param color: color of the model to use for plots (in hex form)
+        :type color: str
+        :param keepRealisations: indicates whether realisations should be kept or averaged
+        :type keepRealisations: bool
+        :param state_type: type of states to load ('computed', 'analysed' or 'forecast'). Default is 'analysed'.
+        :type state_type: str
+        """
+        if modelName is None:
+            modelName = os.path.basename(dataDirectory)
+
+        self[modelName] = False
+        self.addModel(modelName, Model(dataDirectory, dataFormat, keepRealisations=keepRealisations, state_type=state_type), color)
+
+    def isModelLoading(self, modelName):
+        """
+        Checks if the model is present in Models and if it is being loaded.
+
+        :param modelName: name of the model to check
+        :type modelName: str
+        :return: the state of loading of the model (True means loading)
+        :rtype: bool
+        """
+        return (modelName in self) and (self[modelName] is False)
+
+    def getModelList(self):
+        """
+        Gets the list of model names.
+
+        :return: list of model names
+        :rtype: list
+        """
+        return list(self.keys())
+
+    def isCached(self, modelName):
+        """
+        Checks if a model is present and loaded.
+
+        :param modelName: name of the model to check
+        :type modelName: str
+        :return: the presence of the model in Models (True means present and loaded)
+        :rtype: bool
+        """
+        if modelName not in self:
+            return False
+        if self[modelName] is False:
+            return False
+        return True
diff --git a/webgeodyn/obsdata/obsdata.py b/webgeodyn/obsdata/obsdata.py
index d2e04f6..58d0d09 100644
--- a/webgeodyn/obsdata/obsdata.py
+++ b/webgeodyn/obsdata/obsdata.py
@@ -1,177 +1,177 @@
-import numpy as np
-import os
-from .observatory import ObservatoryGroup
-import cdflib
-
-class ObsData:
-    """
-    Class handling the list of ObservatoryGroups and interfaces it with the webpage.
-    """
-    def __init__(self, obs_directory=None):
-        """
-        Creates the obsGroups dict and loads the data from the current directory.
-        """
-        self.obsGroups = {}
-        if obs_directory is None:
-            obs_directory = os.path.dirname(__file__)
-        self.loadFromDirectory(obs_directory)
-
-    def getDataInfo(self):
-        """
-        Returns a JSON with group names as keys.
-
-        The Values of the JSON are also dictionaries with the following keys,values :
-
-        - 'coordinates': coordinates of the observatories of the group
-        - 'search_radius': search radius of the group
-        - 'display_r': display radius of the group
-
-        """
-        jsondata = {}
-        for obsGroupName in self.obsGroups:
-            jsondata[obsGroupName] = {
-                "coordinates": self.obsGroups[obsGroupName].coordinates,
-                "search_radius": self.obsGroups[obsGroupName].search_radius,
-                "display_r": self.obsGroups[obsGroupName].display_r
-            }
-        return jsondata
-
-    def addObsGroup(self, groupName, display_r, search_radius=None):
-        """
-        Creates a new ObservatoryGroup.
-
-        :param groupName: name of the group to create. Raises an Error if it already exists.
-        :type groupName: str
-        :param display_r: radius where the group data should be displayed
-        :type display_r: float or None
-        :param search_radius: ?
-        :type search_radius:
-        """
-        if groupName in self.obsGroups:
-            raise ValueError("An observatories group named %s already exists." % groupName)
-        self.obsGroups[groupName] = ObservatoryGroup(groupName, display_r, search_radius)
-
-    def getObsR(self, th, ph, groupName):
-        """
-        :param th: colatitude at which the r should be fetched.
-        :type th: float
-        :param ph: azimuth at which the r should be fetched.
-        :type ph: float
-        :param groupName: name of the group in which the data should be fetched.
- :type groupName: - :return: radius of the group - :rtype: float - """ - return self.obsGroups[groupName].getObsR(th, ph) - - def getData(self, measureName, th, ph, groupName): - """ - Gets the observed data for the given parameters. - - :param measureName: name of the measure to fetch - :type measureName: str - :param th: colatitude at which the data should be fetched. - :type th: float - :param ph: azimuth at which the data should be fetched. - :type ph: float - :param groupName: name of the group for which the data should be fetched - :type groupName: str - :return: Temporal array of the observed data at th,ph. - :rtype: np.array - """ - return self.obsGroups[groupName].getObservatoryData(th, ph, measureName) - - def find_cdf_files(self, path): - """ - find all files with .cdf extension in path directory and - returns the list of 4-month data and the 1-month data. - """ - brut_files_1M, brut_files_4M = [], [] - for dirpath, dirnames, filenames in os.walk(path): - for filename in [f for f in filenames if f.endswith(".cdf")]: - name = os.path.join(dirpath, filename) - if '4M' in filename: - brut_files_4M.append(name) - elif '1M' in filename: - brut_files_1M.append(name) - else: - print('file {} do not match requirements'.format(filename)) - return brut_files_1M, brut_files_4M - - def loadFromDirectory(self, dataDirectory, cdf_type='4M'): - """ - Reads VO files into CHAMP, SWARM, GO and other satellites files in Magnetic ObservatoryGroups. - - :param dataDirectory: directory where the files are located - :type dataDirectory: str - """ - # Read CHAMP, SWARM, Oersted, Cryosat and Composite cdf data files - possible_ids = np.array(['GROUND', 'CHAMP', 'SWARM', 'OERSTED', 'CRYOSAT', 'COMPOSITE']) - cdf_name_shortcuts = ['GO', 'CH', 'SW', 'OR', 'CR', 'CO'] - - # find all cdf filenames in data directory - cdf_files_1M, cdf_files_4M = self.find_cdf_files(dataDirectory) - - for obsGroupName, cdf_shortcut in zip(possible_ids, cdf_name_shortcuts): - GO = True if obsGroupName == 'GROUND' else False - - if cdf_type == '4M': - cdf_files = cdf_files_4M - elif cdf_type == '1M': - cdf_files = cdf_files_1M - else: - raise ValueError('cdf_type {} not recognized'.format(cdf_type)) - # extract file that starts with the cdf name shortcut, i.e. GO, CH, SW.... 
-            try:
-                idx = [name.split('/')[-1][:2] for name in cdf_files].index(cdf_shortcut)
-            except ValueError:
-                print('{} not found'.format(possible_ids[cdf_name_shortcuts.index(cdf_shortcut)]))
-            cdf_filename = cdf_files[idx]
-
-            cdf_file = cdflib.CDF(cdf_filename)
-
-            print("Reading {} data from {}".format(obsGroupName, cdf_filename))
-
-            thetas = 90 - cdf_file.varget("Latitude")  # convert in colatitude
-            if np.any(thetas > 180) or np.any(thetas < 0):
-                raise ValueError('angle th {} represent the colatitude, did you use latitudes instead?'.format(th))
-            phis = cdf_file.varget("Longitude")
-            radii = cdf_file.varget('Radius') / 1000  # to convert in km
-            if GO:
-                locs = [''.join([l[0] for l in loc]) for loc in cdf_file.varget('Obs')]
-
-            # because VOs circles should be bigger than GOs
-            if GO:
-                self.addObsGroup(obsGroupName, display_r=6371.2, search_radius=None)
-            else:
-                # assume unique radius for all VOs through time
-                self.addObsGroup(obsGroupName, display_r=radii[0], search_radius=850)
-
-            times_stamp = cdf_file.varget('Timestamp')
-            dec_times = []
-            for t in times_stamp:
-                year, month = cdflib.cdfepoch.breakdown_epoch(t)[:2]
-                dec_times.append(year + month / 12)
-
-            Bs, SVs = cdf_file.varget('B_CF'), cdf_file.varget('B_SV')
-            if GO:
-                Bs -= cdf_file.varget('bias_crust')
-
-            for i, (B, SV, r, th, ph, time) in enumerate(zip(Bs, SVs, radii, thetas, phis, dec_times)):
-                if self.obsGroups[obsGroupName].display_r is None and not GO:
-                    self.obsGroups[obsGroupName].display_r = r
-
-                if GO:
-                    self.obsGroups[obsGroupName].addObservatory(locs[i], r, th, ph)
-                else:
-                    self.obsGroups[obsGroupName].addObservatory(obsGroupName, r, th, ph)  # Add if not exists
-
-                if np.any(np.isnan(B)):
-                    continue
-                else:
-                    self.obsGroups[obsGroupName].addObservatoryData(th, ph, 'MF', time, B[0], B[1], B[2])
-
-                if np.any(np.isnan(SV)):
-                    continue
-                else:
-                    self.obsGroups[obsGroupName].addObservatoryData(th, ph, 'SV', time, SV[0], SV[1], SV[2])
+import numpy as np
+import os
+from .observatory import ObservatoryGroup
+import cdflib
+
+class ObsData:
+    """
+    Class handling the list of ObservatoryGroups and interfacing it with the webpage.
+    """
+    def __init__(self, obs_directory=None):
+        """
+        Creates the obsGroups dict and loads the data from the given directory (defaults to the module directory).
+        """
+        self.obsGroups = {}
+        if obs_directory is None:
+            obs_directory = os.path.dirname(__file__)
+        self.loadFromDirectory(obs_directory)
+
+    def getDataInfo(self):
+        """
+        Returns a JSON with group names as keys.
+
+        The values of the JSON are also dictionaries with the following keys and values:
+
+        - 'coordinates': coordinates of the observatories of the group
+        - 'search_radius': search radius of the group
+        - 'display_r': display radius of the group
+
+        """
+        jsondata = {}
+        for obsGroupName in self.obsGroups:
+            jsondata[obsGroupName] = {
+                "coordinates": self.obsGroups[obsGroupName].coordinates,
+                "search_radius": self.obsGroups[obsGroupName].search_radius,
+                "display_r": self.obsGroups[obsGroupName].display_r
+            }
+        return jsondata
+
+    def addObsGroup(self, groupName, display_r, search_radius=None):
+        """
+        Creates a new ObservatoryGroup.
+
+        :param groupName: name of the group to create. Raises an Error if it already exists.
+        :type groupName: str
+        :param display_r: radius where the group data should be displayed
+        :type display_r: float or None
+        :param search_radius: radius used when searching for the observatories of the group (None to disable)
+        :type search_radius: float or None
+        """
+        if groupName in self.obsGroups:
+            raise ValueError("An observatories group named %s already exists." % groupName)
+        self.obsGroups[groupName] = ObservatoryGroup(groupName, display_r, search_radius)
+
+    def getObsR(self, th, ph, groupName):
+        """
+        Gets the radius of the group at the given coordinates.
+
+        :param th: colatitude at which the r should be fetched.
+        :type th: float
+        :param ph: azimuth at which the r should be fetched.
+        :type ph: float
+        :param groupName: name of the group in which the data should be fetched.
+        :type groupName: str
+        :return: radius of the group
+        :rtype: float
+        """
+        return self.obsGroups[groupName].getObsR(th, ph)
+
+    def getData(self, measureName, th, ph, groupName):
+        """
+        Gets the observed data for the given parameters.
+
+        :param measureName: name of the measure to fetch
+        :type measureName: str
+        :param th: colatitude at which the data should be fetched.
+        :type th: float
+        :param ph: azimuth at which the data should be fetched.
+        :type ph: float
+        :param groupName: name of the group for which the data should be fetched
+        :type groupName: str
+        :return: temporal array of the observed data at (th, ph).
+        :rtype: np.array
+        """
+        return self.obsGroups[groupName].getObservatoryData(th, ph, measureName)
+
+    def find_cdf_files(self, path):
+        """
+        Finds all files with the .cdf extension in the path directory and
+        returns the lists of 1-month and 4-month data files.
+        """
+        brut_files_1M, brut_files_4M = [], []
+        for dirpath, dirnames, filenames in os.walk(path):
+            for filename in [f for f in filenames if f.endswith(".cdf")]:
+                name = os.path.join(dirpath, filename)
+                if '4M' in filename:
+                    brut_files_4M.append(name)
+                elif '1M' in filename:
+                    brut_files_1M.append(name)
+                else:
+                    print('file {} does not match requirements'.format(filename))
+        return brut_files_1M, brut_files_4M
+
+    def loadFromDirectory(self, dataDirectory, cdf_type='4M'):
+        """
+        Reads CHAMP, SWARM, ground observatory (GO) and other satellite VO files into magnetic ObservatoryGroups.
+
+        :param dataDirectory: directory where the files are located
+        :type dataDirectory: str
+        :param cdf_type: temporal resolution of the files to load ('4M' or '1M'). Default is '4M'.
+        :type cdf_type: str
+        """
+        # Read CHAMP, SWARM, Oersted, Cryosat and Composite cdf data files
+        possible_ids = np.array(['GROUND', 'CHAMP', 'SWARM', 'OERSTED', 'CRYOSAT', 'COMPOSITE'])
+        cdf_name_shortcuts = ['GO', 'CH', 'SW', 'OR', 'CR', 'CO']
+
+        # find all cdf filenames in data directory
+        cdf_files_1M, cdf_files_4M = self.find_cdf_files(dataDirectory)
+
+        for obsGroupName, cdf_shortcut in zip(possible_ids, cdf_name_shortcuts):
+            GO = (obsGroupName == 'GROUND')
+
+            if cdf_type == '4M':
+                cdf_files = cdf_files_4M
+            elif cdf_type == '1M':
+                cdf_files = cdf_files_1M
+            else:
+                raise ValueError('cdf_type {} not recognized'.format(cdf_type))
+            # extract the file that starts with the cdf name shortcut, e.g. GO, CH, SW...
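+            # For instance, for SWARM the shortcut 'SW' would match a file
+            # named like 'SW_OPER_VOBS_4M_2.cdf' (hypothetical name, for
+            # illustration only)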
+            try:
+                idx = [name.split('/')[-1][:2] for name in cdf_files].index(cdf_shortcut)
+            except ValueError:
+                print('{} not found'.format(possible_ids[cdf_name_shortcuts.index(cdf_shortcut)]))
+                # No file matches this shortcut: skip the group instead of failing on an undefined idx
+                continue
+            cdf_filename = cdf_files[idx]
+
+            cdf_file = cdflib.CDF(cdf_filename)
+
+            print("Reading {} data from {}".format(obsGroupName, cdf_filename))
+
+            thetas = 90 - cdf_file.varget("Latitude")  # convert to colatitude
+            if np.any(thetas > 180) or np.any(thetas < 0):
+                raise ValueError('angles th {} represent colatitudes, did you use latitudes instead?'.format(thetas))
+            phis = cdf_file.varget("Longitude")
+            radii = cdf_file.varget('Radius') / 1000  # convert to km
+            if GO:
+                locs = [''.join([l[0] for l in loc]) for loc in cdf_file.varget('Obs')]
+
+            # because VO circles should be bigger than GO ones
+            if GO:
+                self.addObsGroup(obsGroupName, display_r=6371.2, search_radius=None)
+            else:
+                # assume a unique radius for all VOs through time
+                self.addObsGroup(obsGroupName, display_r=radii[0], search_radius=850)
+
+            times_stamp = cdf_file.varget('Timestamp')
+            dec_times = []
+            for t in times_stamp:
+                year, month = cdflib.cdfepoch.breakdown_epoch(t)[:2]
+                dec_times.append(year + month / 12)
+
+            Bs, SVs = cdf_file.varget('B_CF'), cdf_file.varget('B_SV')
+            if GO:
+                Bs -= cdf_file.varget('bias_crust')
+
+            for i, (B, SV, r, th, ph, time) in enumerate(zip(Bs, SVs, radii, thetas, phis, dec_times)):
+                if self.obsGroups[obsGroupName].display_r is None and not GO:
+                    self.obsGroups[obsGroupName].display_r = r
+
+                if GO:
+                    self.obsGroups[obsGroupName].addObservatory(locs[i], r, th, ph)
+                else:
+                    self.obsGroups[obsGroupName].addObservatory(obsGroupName, r, th, ph)  # Add if not exists
+
+                # Store each series independently so a NaN in B does not drop a valid SV
+                if not np.any(np.isnan(B)):
+                    self.obsGroups[obsGroupName].addObservatoryData(th, ph, 'MF', time, B[0], B[1], B[2])
+
+                if not np.any(np.isnan(SV)):
+                    self.obsGroups[obsGroupName].addObservatoryData(th, ph, 'SV', time, SV[0], SV[1], SV[2])
diff --git a/webgeodyn/server/__init__.py b/webgeodyn/server/__init__.py
old mode 100644
new mode 100755
diff --git a/webgeodyn/server/datahandlers.py b/webgeodyn/server/datahandlers.py
old mode 100644
new mode 100755
diff --git a/webgeodyn/server/server.py b/webgeodyn/server/server.py
old mode 100644
new mode 100755
-- GitLab