Commit 64e05d1c authored by Jonathan Schaeffer's avatar Jonathan Schaeffer
Browse files

Get stats at channel level

parent 31fde30e
......@@ -34,16 +34,16 @@ def dict_dumper(dict):
def scan_volume(path):
"""
Scanne un volume indiqué par son chemin (path).
La fonction lance une commande "du -d3 path" et analyse chaque ligne renvoyée.
La fonction lance une commande "du -d4 path" et analyse chaque ligne renvoyée.
Elle renvoie une liste de dictionnaires :
[ {year: 2011, network: 'G', size: '100', files: '14', station: 'STAT'}, ...]
[ {year: 2011, network: 'G', size: '100', files: '14', station: 'STAT', channel: 'BHZ.D'}, ...]
"""
data = []
volume = os.path.realpath(path)+'/'
logger.debug("Volume %s"%(volume))
# TODO mettre le niveau de profondeur (2) en option
starttime = datetime.datetime.now()
lines = subprocess.check_output(["du", "--exclude", ".snapshot", "-d3", volume]).decode("utf-8").splitlines()
lines = subprocess.check_output(["du", "--exclude", ".snapshot", "-d4", volume]).decode("utf-8").splitlines()
logger.debug("Volume scanned in %s"%(datetime.datetime.now() - starttime))
for l in lines:
logger.debug(l)
......@@ -51,10 +51,10 @@ def scan_volume(path):
# On ne garde que le chemin qui nous intéresse
path = path.replace(volume,'').split('/')
# Ne pas considérer le seul chemin de niveau 1
if len(path) == 3:
if len(path) == 4:
logger.debug(f"path : {path}")
logger.debug(f"size : {size}")
data.append({'year': path[0], 'network': path[1], 'station': path[2], 'size': size})
data.append({'year': path[0], 'network': path[1], 'station': path[2], 'channel': path[3], 'size': size})
return data
......@@ -165,9 +165,9 @@ def cli(configfile):
conn = psycopg2.connect(dbname=cfg['postgres']['database'], user=cfg['postgres']['user'], host=cfg['postgres']['host'], password=cfg['postgres']['password'], port=cfg['postgres']['port'])
cur = conn.cursor()
execute_values(cur,
"""INSERT INTO datastats (network, year, type, size, is_permanent, date) VALUES %s""",
"""INSERT INTO datastats (network, year, station, channel, type, size, is_permanent, date) VALUES %s""",
statistics,
"(%(network)s, %(year)s, %(type)s, %(size)s, %(is_permanent)s, %(date)s)")
"(%(network)s, %(year)s, %(station)s, %(channel)s, %(type)s, %(size)s, %(is_permanent)s, %(date)s)")
conn.commit()
if 'influxdb' in cfg:
......@@ -182,6 +182,7 @@ def cli(configfile):
"year": int(stat['year']),
"network": stat['network'],
"station": stat['station'],
"channel": stat['channel'],
"permanent": bool(stat['is_permanent']),
"type": stat['type'],
"date": stat['date']
......
......@@ -6,7 +6,7 @@ with open('README.md', 'r', encoding='utf-8') as f:
setup(
name='resifdatareporter',
version='0.7.9',
version='0.8.0',
description='Scans the resif data repository and compute metrics. Sends the result in influxdb or postgres',
long_description=readme,
long_description_content_type="text/markdown",
......
Supports Markdown
Attach a file by dragging & dropping, selecting or pasting it.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment