Commit fe2903d0 authored by EXT Istas
parents 2bf20fb9 a3d36d07
# Ignore Pycharm-generated folder
.idea/
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# rst files
*.rst
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
.static_storage/
.media/
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
# Ignore test.py file
webgeodyn_data/test.py
@@ -20,8 +20,7 @@ def load(dataDirectory, dataModel, keepRealisations, state_type='analysed'):
     :param state_type: Either forecast, computed or analysed depending on the type of states needed
     """
     firstpoint = 3
-    assert state_type in ('computed', 'analysed', 'forecast')
+    assert state_type in ('computed', 'analysed', 'analysis', 'forecast')
     measures_to_load = ['MF', 'SV', 'ER', 'U']
     hdf5_files = glob.glob(os.path.join(dataDirectory, '*.hdf5'))
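The hunk above relaxes the state_type check of the HDF5 loader so that 'analysis' is accepted as an alias of 'analysed'. A minimal standalone sketch of the new validation follows; the helper name is hypothetical, since in the repository the assert sits directly inside load():

# Sketch of the state_type validation introduced by this hunk (helper name is hypothetical):
def check_state_type(state_type='analysed'):
    # 'analysis' is now accepted alongside 'analysed', 'computed' and 'forecast'
    assert state_type in ('computed', 'analysed', 'analysis', 'forecast')
    return state_type

print(check_state_type('analysis'))   # accepted after this change
print(check_state_type('forecast'))   # unchanged behaviour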
import os.path
from itertools import cycle

from .model import Model


class Models(dict):
    """
    Dictionary containing the loaded models.
    """
    def __init__(self):
        super().__init__()
        # ui_colors are chosen so that consecutive colors are visually very different
        # (10-class Paired on http://colorbrewer2.org)
        self.ui_colors = cycle(['#1f78b4', '#33a02c', '#e31a1c', '#ff7f00', '#6a3d9a',
                                '#a6cee3', '#b2df8a', '#fb9a99', '#fdbf6f', '#cab2d6'])
        self.model_colors = {"archeomag": "#ffaaaa",
                             "ced": "#ff0000",
                             "chaos": "#4daf4a",
                             "covobs": "#984ea3",
                             "enscore": "#af694a",
                             "pygeodyn_asc": "#ff00ff",
                             "pygeodyn_hdf5": "#ff00ff",
                             "midpath": "#00ff00",
                             "nath": "#0000ff",
                             "s1fs": "#aaaaaa",
                             "s1fs2": "#aaaaaa",
                             "zforecast": "#e41a1c"}

    def addModel(self, modelName, model, color=None):
        """
        Adds a model to the dictionary of loaded models.

        :param modelName: name of the model
        :type modelName: str
        :param model: Model to add
        :type model: Model
        :param color: color to use for plots of the model (default: None; in that case a dedicated color is used for each data format)
        :type color: str (hexadecimal form)
        """
        if color is None:
            try:
                color = self.model_colors[model.dataFormat]
            except KeyError:
                # Fall back to the cycling UI colors if the data format has no dedicated color
                color = 'ui'
        if color == 'ui':
            color = next(self.ui_colors)
        # Check that the color is in hexadecimal form
        if len(color) != 7 or not color.startswith('#'):
            raise ValueError('Color of {} should be given in hexadecimal form! Got "{}" instead.'.format(modelName, color))
        model.color = color
        self[modelName] = model

    def loadModel(self, dataDirectory, modelName=None, dataFormat='default', color=None, keepRealisations=True, state_type='analysed'):
        """
        Creates a Model by loading the files in dataDirectory using dataFormat, then calls addModel.
        Note that it first sets the allocated space in the dict to False, in case an asynchronous thread checks the loading state.

        :param dataDirectory: directory where the model files are located
        :type dataDirectory: str (path)
        :param modelName: name of the model to use. If None, the basename of dataDirectory is used.
        :type modelName: str or None
        :param dataFormat: format to use to load the model data. Default is 'default'.
        :type dataFormat: str
        :param color: color of the model to use for plots (in hex form)
        :type color: str
        :param keepRealisations: indicates whether realisations should be kept or averaged
        :type keepRealisations: bool
        :param state_type: type of states to load ('analysed', 'computed' or 'forecast')
        :type state_type: str
        """
        if modelName is None:
            modelName = os.path.basename(dataDirectory)
        self[modelName] = False
        self.addModel(modelName, Model(dataDirectory, dataFormat, keepRealisations=keepRealisations, state_type=state_type), color)

    def isModelLoading(self, modelName):
        """
        Checks if the model is present in Models and is still being loaded.

        :param modelName: name of the model to check
        :type modelName: str
        :return: the loading state of the model (True means loading)
        :rtype: bool
        """
        return (modelName in self) and (self[modelName] is False)

    def getModelList(self):
        """
        Gets the list of model names.

        :return: list of model names
        :rtype: list
        """
        return list(self.keys())

    def isCached(self, modelName):
        """
        Checks if a model is present and loaded.

        :param modelName: name of the model to check
        :type modelName: str
        :return: presence of the model in Models (True means present and loaded)
        :rtype: bool
        """
        if modelName not in self:
            return False
        if self[modelName] is False:
            return False
        return True
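A short usage sketch of the Models container above; the data directory is a placeholder, the format name is taken from the model_colors keys, and the import path is an assumption about the package layout:

# Hedged usage sketch; '/data/chaos_run' is a placeholder, the import path is assumed.
from webgeodyn.models import Models  # module path is an assumption

models = Models()
# Load a model stored in the 'chaos' format, averaging realisations:
models.loadModel('/data/chaos_run', dataFormat='chaos', keepRealisations=False)

print(models.getModelList())                # ['chaos_run']
print(models.isCached('chaos_run'))         # True once loading has finished
print(models.isModelLoading('chaos_run'))   # False: the placeholder value was replaced by the Model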
import os

import numpy as np
import cdflib

from .observatory import ObservatoryGroup


class ObsData:
    """
    Class handling the list of ObservatoryGroups and interfacing it with the webpage.
    """
    def __init__(self, obs_directory=None):
        """
        Creates the obsGroups dict and loads the data from the given directory
        (defaults to the directory of this file).
        """
        self.obsGroups = {}
        if obs_directory is None:
            obs_directory = os.path.dirname(__file__)
        self.loadFromDirectory(obs_directory)

    def getDataInfo(self):
        """
        Returns a JSON-serialisable dict with group names as keys.
        The values are also dictionaries with the following keys:
        - 'coordinates': coordinates of the observatories of the group
        - 'search_radius': search radius of the group
        - 'display_r': display radius of the group
        """
        jsondata = {}
        for obsGroupName in self.obsGroups:
            jsondata[obsGroupName] = {
                "coordinates": self.obsGroups[obsGroupName].coordinates,
                "search_radius": self.obsGroups[obsGroupName].search_radius,
                "display_r": self.obsGroups[obsGroupName].display_r
            }
        return jsondata

    def addObsGroup(self, groupName, display_r, search_radius=None):
        """
        Creates a new ObservatoryGroup.

        :param groupName: name of the group to create. Raises an error if it already exists.
        :type groupName: str
        :param display_r: radius at which the group data should be displayed
        :type display_r: float or None
        :param search_radius: ?
        :type search_radius:
        """
        if groupName in self.obsGroups:
            raise ValueError("An observatories group named %s already exists." % groupName)
        self.obsGroups[groupName] = ObservatoryGroup(groupName, display_r, search_radius)

    def getObsR(self, th, ph, groupName):
        """
        Gets the observatory radius at the given location.

        :param th: colatitude at which the radius should be fetched
        :type th: float
        :param ph: azimuth at which the radius should be fetched
        :type ph: float
        :param groupName: name of the group in which the data should be fetched
        :type groupName: str
        :return: radius of the group
        :rtype: float
        """
        return self.obsGroups[groupName].getObsR(th, ph)

    def getData(self, measureName, th, ph, groupName):
        """
        Gets the observed data for the given parameters.

        :param measureName: name of the measure to fetch
        :type measureName: str
        :param th: colatitude at which the data should be fetched
        :type th: float
        :param ph: azimuth at which the data should be fetched
        :type ph: float
        :param groupName: name of the group for which the data should be fetched
        :type groupName: str
        :return: temporal array of the observed data at (th, ph)
        :rtype: np.array
        """
        return self.obsGroups[groupName].getObservatoryData(th, ph, measureName)

    def find_cdf_files(self, path):
        """
        Finds all files with a .cdf extension in the path directory and
        returns the lists of 1-month and 4-month data files.
        """
        brut_files_1M, brut_files_4M = [], []
        for dirpath, dirnames, filenames in os.walk(path):
            for filename in [f for f in filenames if f.endswith(".cdf")]:
                name = os.path.join(dirpath, filename)
                if '4M' in filename:
                    brut_files_4M.append(name)
                elif '1M' in filename:
                    brut_files_1M.append(name)
                else:
                    print('file {} does not match requirements'.format(filename))
        return brut_files_1M, brut_files_4M

    def loadFromDirectory(self, dataDirectory, cdf_type='4M'):
        """
        Reads the virtual observatory (VO) and ground observatory (GO) cdf files
        (CHAMP, SWARM, GO and other satellites) into magnetic ObservatoryGroups.

        :param dataDirectory: directory where the files are located
        :type dataDirectory: str
        """
        # Read GROUND, CHAMP, SWARM, Oersted, Cryosat and Composite cdf data files
        possible_ids = np.array(['GROUND', 'CHAMP', 'SWARM', 'OERSTED', 'CRYOSAT', 'COMPOSITE'])
        cdf_name_shortcuts = ['GO', 'CH', 'SW', 'OR', 'CR', 'CO']
        # Find all cdf filenames in the data directory
        cdf_files_1M, cdf_files_4M = self.find_cdf_files(dataDirectory)
        for obsGroupName, cdf_shortcut in zip(possible_ids, cdf_name_shortcuts):
            GO = (obsGroupName == 'GROUND')
            if cdf_type == '4M':
                cdf_files = cdf_files_4M
            elif cdf_type == '1M':
                cdf_files = cdf_files_1M
            else:
                raise ValueError('cdf_type {} not recognized'.format(cdf_type))
            # Extract the file whose name starts with the cdf name shortcut, i.e. GO, CH, SW...
            try:
                idx = [os.path.basename(name)[:2] for name in cdf_files].index(cdf_shortcut)
            except ValueError:
                print('{} not found'.format(obsGroupName))
                continue  # skip this group if no matching file was found
            cdf_filename = cdf_files[idx]
            cdf_file = cdflib.CDF(cdf_filename)
            print("Reading {} data from {}".format(obsGroupName, cdf_filename))
            thetas = 90 - cdf_file.varget("Latitude")  # convert to colatitude
            if np.any(thetas > 180) or np.any(thetas < 0):
                raise ValueError('Colatitudes must lie in [0, 180] degrees; did you use latitudes instead?')
            phis = cdf_file.varget("Longitude")
            radii = cdf_file.varget('Radius') / 1000  # convert to km
            if GO:
                locs = [''.join([l[0] for l in loc]) for loc in cdf_file.varget('Obs')]
            # VO circles should be displayed bigger than GO ones
            if GO:
                self.addObsGroup(obsGroupName, display_r=6371.2, search_radius=None)
            else:
                # Assume a unique radius for all VOs through time
                self.addObsGroup(obsGroupName, display_r=radii[0], search_radius=850)
            times_stamp = cdf_file.varget('Timestamp')
            dec_times = []
            for t in times_stamp:
                year, month = cdflib.cdfepoch.breakdown_epoch(t)[:2]
                dec_times.append(year + month / 12)
            Bs, SVs = cdf_file.varget('B_CF'), cdf_file.varget('B_SV')
            if GO:
                Bs -= cdf_file.varget('bias_crust')
            for i, (B, SV, r, th, ph, time) in enumerate(zip(Bs, SVs, radii, thetas, phis, dec_times)):
                if self.obsGroups[obsGroupName].display_r is None and not GO:
                    self.obsGroups[obsGroupName].display_r = r
                if GO:
                    self.obsGroups[obsGroupName].addObservatory(locs[i], r, th, ph)
                else:
                    self.obsGroups[obsGroupName].addObservatory(obsGroupName, r, th, ph)  # Add if not exists
                if np.any(np.isnan(B)):
                    continue
                self.obsGroups[obsGroupName].addObservatoryData(th, ph, 'MF', time, B[0], B[1], B[2])
                if np.any(np.isnan(SV)):
                    continue
                self.obsGroups[obsGroupName].addObservatoryData(th, ph, 'SV', time, SV[0], SV[1], SV[2])
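A hedged usage sketch of the ObsData class above; the data directory, group name and coordinates are placeholders, and the import path is an assumption about where the module lives in the package:

# Hedged sketch: paths, group name and (th, ph) values are placeholders.
from webgeodyn.obsdata import ObsData  # module path is an assumption

obs = ObsData(obs_directory='/data/observatories')   # directory containing the .cdf files
info = obs.getDataInfo()
print(list(info.keys()))      # group names, e.g. 'GROUND', 'SWARM', ...

# Fetch the secular-variation series of the SWARM group at a given colatitude/azimuth
# (the location must match one of the group's observatories):
th, ph = 45.0, 120.0
sv_series = obs.getData('SV', th, ph, 'SWARM')
display_radius = obs.getObsR(th, ph, 'SWARM')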
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
@@ -12,6 +12,18 @@ class HomePageController{
     }
     documentReady(){
+        //Add banner
+        let banner = new Banner(
+            '#maincontentdiv',
+            '<h3 class="ui" style="text-align: center"><a href="https://gricad-gitlab.univ-grenoble-alpes.fr/Geodynamo/pygeodyn">pygeodyn</a> (Python geomagnetic data assimilation package) and <a href="https://gricad-gitlab.univ-grenoble-alpes.fr/Geodynamo/webgeodyn">webgeodyn</a> (visualisation tool deployed on this website) are now available for download!</h3>',
+            'info');
+        //Add long description
+        $('#longDesc').load("view/longDesc.html");
+        //Add core flow descriptions
+        $.get("view/coreflowDesc.html", function(data){
+            $('#maincontentdiv .segment').append(data);
+        });
     }
 }
\ No newline at end of file
@@ -28,5 +28,10 @@ $(document).ready(function() {
         }
     };
+    //Add downloadTab
+    $.get("view/downloadTab.html", function(data){
+        $('#tabMenu').append(data);
+    });
     window.mc = new MainController();
 });