Skip to content
GitLab
Menu
Projects
Groups
Snippets
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
Menu
Open sidebar
Geodynamo
pygeodyn
Commits
7a8eb734
Commit
7a8eb734
authored
Jan 20, 2021
by
Mathieu Istas
Browse files
implemented parameter to discard high latitude data
parent
e44f2e99
Changes
4
Hide whitespace changes
Inline
Side-by-side
pygeodyn/inout/config.py
View file @
7a8eb734
...
...
@@ -225,6 +225,12 @@ class ComputationConfig(object):
if
not
hasattr
(
self
,
'obs_select'
):
logging
.
warning
(
'No values of govo_obs_list found, default take all obs files in observation directory'
)
self
.
obs_select
=
all
if
not
hasattr
(
self
,
'discard_high_lat'
):
logging
.
warning
(
"Suppression of high latitude data was not set, assuming no suppression"
)
self
.
discard_high_lat
=
False
elif
self
.
discard_high_lat
:
logging
.
warning
(
"Using default value of 60 degree latitude for the suppression of high latitude data"
)
self
.
discard_high_lat
=
60
else
:
self
.
obs_select
=
None
...
...
pygeodyn/inout/observations.py
View file @
7a8eb734
...
...
@@ -94,7 +94,7 @@ def build_go_vo_observations(cfg, nb_realisations, measure_type):
logging
.
warning
(
'cdf file corresponding to {} has not been found, continuing without'
.
format
(
obs
))
continue
# all_data consist of (obs_pos, obs_data, var_data)
all_data
=
reads
.
read_observatories_cdf_data
(
file
,
measure_type
,
obs
)
all_data
=
reads
.
read_observatories_cdf_data
(
file
,
measure_type
,
obs
,
discard
=
cfg
.
discard_high_lat
)
complete_obs
=
noise_GVO_data
(
obs
,
all_data
,
cfg
,
nb_realisations
,
measure_type
,
complete_obs
)
# End of loop on obs_type
...
...
pygeodyn/inout/reads.py
View file @
7a8eb734
...
...
@@ -7,6 +7,7 @@ import h5py
from
.
import
priors
as
priors_module
from
..
import
utilities
import
cdflib
import
h5py
def
extract_realisations
(
prior_directory
,
prior_type
,
measures
=
None
):
"""
...
...
@@ -155,23 +156,9 @@ def read_all_computed_realisations(basename, nb_realisations):
return
dates
,
np
.
array
(
all_measures
,
dtype
=
np
.
float64
)
def cdf_times_to_np_date(times):
    """
    Transform the times in cdf epoch into numpy dates, rounding month to nearest.

    :param times: iterable of CDF-epoch timestamps
    :return: list of numpy.datetime64 objects with month resolution
    """
    np_dates = []
    for epoch in times:
        year, month, day = cdflib.cdfepoch.breakdown_epoch(epoch)[:3]
        # Past mid-month: round up to the following month, wrapping December.
        if day > 15:
            month += 1
            if month > 12:
                month = 1
                year += 1
        np_dates.append(np.datetime64('{}-{:02}'.format(year, month)))
    return np_dates
def
read_observatories_cdf_data
(
cdf_filename
,
measure_type
,
obs
,
remove_bias_crust
=
True
,
huge_sigma
=
99999
):
def
read_observatories_cdf_data
(
cdf_filename
,
measure_type
,
obs
,
remove_bias_crust
=
True
,
huge_sigma
=
99999
,
discard
=
False
):
"""
Reads the data of observatories file in cdf format.
In the future, measure_type should not be a parameter and both MF and SV should
...
...
@@ -208,13 +195,6 @@ def read_observatories_cdf_data(cdf_filename, measure_type, obs, remove_bias_cru
err
=
'At least some invalid values of sigma correspond to valid values in B, file, measure and obs {}, {}, {}'
err
=
err
.
format
(
cdf_filename
.
split
(
'/'
)[
-
1
],
measure_type
,
obs
)
# Weak test, test that sigma[i] = Nan implies Bs[i] = Nan, for all i.
#assert np.all(np.isnan(Bs[np.isnan(sigma)])), err
# err = 'No bijection between invalid values of sigma and B, file, measure and obs {}, {}, {}'
# err = err.format(cdf_filename.split('/')[-1], measure_type, obs)
# # Test that Bs[i] = Nan implies sigma[i] = Nan and reciprocally, for all i.
# assert np.all(np.isnan(Bs) == np.isnan(sigma)), err
if
obs
==
'GO'
:
locs
=
[
''
.
join
([
l
[
0
]
for
l
in
loc
])
for
loc
in
cdf_file
.
varget
(
'Obs'
)]
...
...
@@ -222,7 +202,7 @@ def read_observatories_cdf_data(cdf_filename, measure_type, obs, remove_bias_cru
else
:
N_VO
=
len
(
dict
.
fromkeys
(
zip
(
thetas
,
phis
)).
keys
())
# small trick that counts the number of unique pairs of theta/phi
dates
=
cdf_times_to_np_date
(
times
)
dates
=
utilities
.
cdf_times_to_np_date
(
times
)
# dates = [np.datetime64('{}-{:02}'.format(*cdflib.cdfepoch.breakdown_epoch(t)[:2])) for t in times]
# Precise the first date as cdf files are not human readable and therefore it is not easy to find the date manually
# logging.info('First data available at {}'.format(dates[0]))
...
...
@@ -231,7 +211,20 @@ def read_observatories_cdf_data(cdf_filename, measure_type, obs, remove_bias_cru
obs_positions
=
defaultdict
(
list
)
var_data
=
defaultdict
(
list
)
if
discard
:
to_deg
=
lambda
alpha
:
180
*
alpha
/
np
.
pi
to_rad
=
lambda
alpha
:
np
.
pi
*
alpha
/
180
chaos_file
=
'pygeodyn/data/observations/CHAOS-7/CHAOS-7.hdf5'
# takes Chaos file from usual data location
times_chaos
,
dipoles
=
utilities
.
extract_dipole_chaos
(
chaos_file
)
for
i
,
date
in
enumerate
(
dates
):
if
discard
:
# if discard high_lat set to 0, will pass.
g_10
,
g_11
,
h_11
=
dipoles
[
np
.
argmin
(
abs
(
times_chaos
-
times
[
i
])),
:
3
]
th_0
,
ph_0
=
utilities
.
north_geomagnetic_pole_angle
(
g_10
,
g_11
,
h_11
)
theta_d
=
to_deg
(
utilities
.
geomagnetic_latitude
(
thetas
[
i
],
phis
[
i
],
th_0
,
ph_0
))
# all angles should be in rad
if
theta_d
>
90
+
discard
or
theta_d
<
90
-
discard
:
continue
if
any
(
np
.
isnan
(
Bs
[
i
,
:])):
continue
if
any
(
np
.
isnan
(
sigma2
[
i
,
:])):
...
...
pygeodyn/utilities.py
View file @
7a8eb734
...
...
@@ -3,11 +3,11 @@
Utility functions
"""
import
h5py
import
numpy
as
np
import
scipy.interpolate
import
math
import
cdflib
def
decimal_to_date
(
decimal_date
,
month_shift
=
0
):
"""
...
...
@@ -49,6 +49,22 @@ def date_to_decimal(date, nb_decimals=None):
return
round
(
int
(
year
)
+
(
int
(
month
))
/
12.
,
nb_decimals
)
def cdf_times_to_np_date(times):
    """
    Transform the times in cdf epoch into numpy dates, rounding month to nearest.

    :param times: iterable of CDF-epoch timestamps
    :return: list of numpy.datetime64 objects with month resolution
    """
    result = []
    for t in times:
        year, month, day = cdflib.cdfepoch.breakdown_epoch(t)[:3]
        if day > 15:
            # Round to the nearest month: second half of the month counts as
            # the next one (December rolls over to January of the next year).
            year, month = (year + 1, 1) if month == 12 else (year, month + 1)
        result.append(np.datetime64('{}-{:02}'.format(year, month)))
    return result
def
date_array_to_decimal
(
date_array
,
nb_decimals
=
None
):
"""
Uses date_to_decimal to convert a 1D array of datetime64 in a 1D array of floating numbers.
...
...
@@ -118,6 +134,37 @@ def get_degree_order(k):
order
=
twice_order
//
2
return
degree
,
order
,
coef
def north_geomagnetic_pole_angle(g_10, g_11, h_11):
    """
    Given the first three elements of the spherical harmonic decomposition,
    return the angles theta and phi (in radians) of the geomagnetic pole.

    Details in:
    Hulot, G., et al. "The present and future geomagnetic field." (2015): 33-78.
    """
    # Dipole moment amplitude from the three degree-1 Gauss coefficients.
    m_0 = np.linalg.norm(np.array([g_10, g_11, h_11]))
    theta_pole = np.pi - np.arccos(g_10 / m_0)
    phi_pole = np.arctan2(h_11, g_11) - np.pi
    return theta_pole, phi_pole
def geomagnetic_latitude(theta, phi, theta_0, phi_0):
    """
    Computes the geomagnetic (or dipole) latitude.
    Angles must be given in radians.

    theta_0: angle of the north geomagnetic pole
    phi_0: longitude of the north geomagnetic pole

    Returns the angular distance between the point (theta, phi) and the
    pole (theta_0, phi_0), via the spherical law of cosines.
    """
    cos_angle = (np.cos(theta) * np.cos(theta_0)
                 + np.sin(theta) * np.sin(theta_0) * np.cos(phi - phi_0))
    return np.arccos(cos_angle)
def extract_dipole_chaos(chaos_file):
    """
    Extract the coefficients of the dipole (g_10, g_11, h_11) from chaos_file,
    at all available times.

    :param chaos_file: path to a CHAOS model file in HDF5 format containing
        the datasets 'gnm' (Gauss coefficients) and 'times'
    :return: tuple (times, dipoles) where dipoles has shape (n_times, 3) and
        holds (g_10, g_11, h_11) for each time
    """
    # Open read-only: this function never writes, and an explicit mode keeps
    # h5py from ever creating or modifying the file (older h5py versions
    # defaulted to append mode when none was given).
    with h5py.File(chaos_file, 'r') as hf:
        gnms = np.array(hf['gnm'])
        times = np.array(hf['times'])
    # The first three coefficients are the degree-1 (dipole) terms.
    return times, gnms[:, :3]
def
interpolate_Plm
(
thetas
,
legendre_poly_values_at_thetas
,
l
,
m
,
**
kwargs
):
# Set fill values for extremal values (-1, 1)
...
...
Write
Preview
Supports
Markdown
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment