Commit 4a73fa4a authored by Jonathan Schaeffer's avatar Jonathan Schaeffer
Browse files

Cast des valeurs dans les bons types.

Plus de messages de logs
parent db2d6dfd
......@@ -40,7 +40,9 @@ def scan_volume(path):
volume = os.path.realpath(path)+'/'
logger.debug("Volume %s"%(volume))
# TODO mettre le niveau de profondeur (2) en option
lines = subprocess.check_output(["du", "-d2", volume]).decode("utf-8").splitlines()
starttime = datetime.datetime.now()
lines = subprocess.check_output(["du", "--exclude .snapshot", "-d2", volume]).decode("utf-8").splitlines()
logger.debug("Volume scanned in %s"%(datetime.datetime.now() - starttime))
for l in lines:
logger.debug(l)
(size, path) = l.split('\t')
......@@ -62,6 +64,8 @@ def scan_volumes(volumes):
# [ {stat}, {stat}, ]
volume_stats = []
for volume in volumes:
logger.debug("Preparing scan of volume %s", volume['path'])
starttime = datetime.datetime.now()
if 'path' in volume:
stats = scan_volume(volume['path'])
# On rajoute le type comme un élément de chaque statistique
......@@ -73,6 +77,7 @@ def scan_volumes(volumes):
else:
raise ValueError("Volume has no path key : %s"%(volume))
# on applati la liste de listes :
logger.debug("All volumes scanned in %s"%(datetime.datetime.now() - starttime))
return [ x for vol in volume_stats for x in vol ]
......@@ -127,6 +132,17 @@ def main():
logger.debug(statistics)
today = datetime.date.today().strftime("%Y-%m-%d")
# add the network_type (is the network permanent or not) to the statistic
for stat in statistics:
is_permanent = False
if stat['network'] in cfg['metadata']['permanent_networks']:
is_permanent = True
stat['is_permanent'] = is_permanent
# TODO: à revoir, la ligne suivante, c'est la honte
stat['date'] = today
logger.debug(stat)
# Open dump file and write the stats.
if use_cache == False:
try:
......@@ -139,73 +155,63 @@ def main():
except:
logger.error("Error writing data to cache : "+sys.exc_info()[0])
# add the network_type (is the network permanent or not) to the statistic
for stat in statistics:
is_permanent = False
if stat['network'] in cfg['metadata']['permanent_networks']:
is_permanent = True
stat['is_permanent'] = is_permanent
# TODO: à revoir, la ligne suivante, c'est la honte
stat['date'] = today
logger.debug(stat)
# Write to postgres database
if 'postgres' in cfg:
logger.info('Writing to postgres database')
conn = psycopg2.connect(dbname=cfg['postgres']['database'], user=cfg['postgres']['user'], host=cfg['postgres']['host'], password=cfg['postgres']['password'], port=cfg['postgres']['port'])
cur = conn.cursor()
execute_values(cur,
"""INSERT INTO datastats (network, year, type, size, is_permanent, date) VALUES %s""",
statistics,
"(%(network)s, %(year)s, %(type)s, %(size)s, %(is_permanent)s, %(date)s)")
conn.commit()
if 'influxdb' in cfg:
logger.info('Writing in influxdb')
influxdb_json_data = []
# Compose json data
record_time = strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
for stat in statistics:
influxdb_json_data.append(
{"measurement": cfg['influxdb']['measurement'],
"tags": {
"year": stat['year'],
"network": stat['network'],
"permanent": stat['is_permanent'],
"type": stat['type'],
"date": today
},
"time": record_time,
"fields": {
"size": stat['size']
}
}
)
logger.info(pformat(influxdb_json_data))
# Now, send this data to influxdb
try:
logger.info("Sending data to influxdb")
logger.debug("host = "+cfg['influxdb']['server'])
logger.debug("port = "+str(cfg['influxdb']['port']))
logger.debug("database = "+cfg['influxdb']['database'])
logger.debug("username = "+cfg['influxdb']['user'])
client = InfluxDBClient(host = cfg['influxdb']['server'],
port = cfg['influxdb']['port'],
database = cfg['influxdb']['database'],
username = cfg['influxdb']['user'],
password = cfg['influxdb']['password'],
ssl = cfg['influxdb']['ssl'],
verify_ssl = cfg['influxdb']['verify_ssl']
)
client.write_points(influxdb_json_data)
except Exception as e:
logger.error("Unexpected error writing data to influxdb")
logger.error(e)
# Write to postgres database
if 'postgres' in cfg:
logger.info('Writing to postgres database')
conn = psycopg2.connect(dbname=cfg['postgres']['database'], user=cfg['postgres']['user'], host=cfg['postgres']['host'], password=cfg['postgres']['password'], port=cfg['postgres']['port'])
cur = conn.cursor()
execute_values(cur,
"""INSERT INTO datastats (network, year, type, size, is_permanent, date) VALUES %s""",
statistics,
"(%(network)s, %(year)s, %(type)s, %(size)s, %(is_permanent)s, %(date)s)")
conn.commit()
if 'influxdb' in cfg:
logger.info('Writing in influxdb')
influxdb_json_data = []
# Compose json data
record_time = strftime("%Y-%m-%dT%H:%M:%SZ", gmtime())
for stat in statistics:
influxdb_json_data.append(
{"measurement": cfg['influxdb']['measurement'],
"tags": {
"year": int(stat['year']),
"network": stat['network'],
"permanent": bool(stat['is_permanent']),
"type": stat['type'],
"date": today
},
"time": record_time,
"fields": {
"size": int(stat['size'])
}
}
)
logger.info(pformat(influxdb_json_data))
# Now, send this data to influxdb
try:
logger.info("Sending data to influxdb")
logger.debug("host = "+cfg['influxdb']['server'])
logger.debug("port = "+str(cfg['influxdb']['port']))
logger.debug("database = "+cfg['influxdb']['database'])
logger.debug("username = "+cfg['influxdb']['user'])
client = InfluxDBClient(host = cfg['influxdb']['server'],
port = cfg['influxdb']['port'],
database = cfg['influxdb']['database'],
username = cfg['influxdb']['user'],
password = cfg['influxdb']['password'],
ssl = cfg['influxdb']['ssl'],
verify_ssl = cfg['influxdb']['verify_ssl']
)
client.write_points(influxdb_json_data)
except Exception as e:
logger.error("Unexpected error writing data to influxdb")
logger.error(e)
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment