Commit 2f2d68c1 authored by Jonathan Schaeffer's avatar Jonathan Schaeffer
Browse files

Ne pas remonter la taille par stations

parent d10e33ef
__version__='0.30.0'
__version__='0.32.0'
......@@ -60,10 +60,8 @@ def scan_ph5_volume(volpath):
- sinon, en analysant les volumes ph5, mais je ne sais pas si on en a vraiment besoin.
"""
data = []
stations = [""]
volume = os.path.realpath(volpath)+'/'
logger.debug("Volume %s", volume)
starttime = datetime.now()
proc = subprocess.Popen(["ls", volume], stdout=subprocess.PIPE)
for l in io.TextIOWrapper(proc.stdout, encoding='utf-8'):
network = l.strip()
......@@ -75,15 +73,6 @@ def scan_ph5_volume(volpath):
# Bon, ça n'a pas marché, on fait quoi ?
logger.error("Unable to get year from path %s. Ignoring this one", path)
continue
try:
# Récupération de la liste des stations
h5data = h5py.File(f"{path}/master.ph5",'r')
logger.debug("Master PH5 stations: %s",
h5data['Experiment_g']['Maps_g'])
stations = [sta[6:] for sta in list(h5data['Experiment_g']['Maps_g']) if sta.startswith('Das_g_')]
logger.debug("%s stations in %s", len(stations), network)
except Exception as err:
logger.error("No master.ph5 file in %s. Let's assume there is one station", path)
total = 0
for dirpath, dirnames, filenames in os.walk(path):
......@@ -93,10 +82,8 @@ def scan_ph5_volume(volpath):
total = total + os.path.getsize(f"{path}/{i}")
logger.debug("Total size of %s is %s (%s GB)", network, total, total/(1024**3) )
# Make a statistic array with those stations dividing total size on each station.
per_station_size = int(total / len(stations))
for sta in stations:
data.append({'type': 'ph5_validated', 'year': year, 'network': network, 'station': sta,
'channel': None, 'quality': None, 'size': per_station_size})
data.append({'type': 'ph5', 'year': year, 'network': network, 'station': None,
'channel': None, 'quality': None, 'size': total})
return data
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment