Commit a973148f authored by Jerome Touvier

Merge branch 'cleanup' into 'master'

cleanup

See merge request OSUG/RESIF/ws-timeseries!3
parents 68eaec40 1e5aad9c
@@ -10,5 +10,5 @@ COPY apps ./apps/
COPY templates ./templates/
COPY static ./static/
CMD ["/bin/bash", "-c", "gunicorn --bind 0.0.0.0:8000 start_timeseries:app"]
ENTRYPOINT ["gunicorn"]
CMD ["start_timeseries:app"]
# List of loggers, handlers and formatters:
[loggers]
keys=root
[handlers]
keys=consoleHandler
[formatters]
keys=generic, verbose
# loggers:
[logger_root]
level=DEBUG
handlers=consoleHandler
# handlers:
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=generic
args=(sys.stdout,)
# formatters:
[formatter_generic]
#format=[%(asctime)s] %(levelname)s (%(process)d) [%(module)s:%(lineno)d] %(message)s
format=[%(asctime)s] %(levelname)s (%(process)d) [%(pathname)s:%(lineno)d] %(message)s
# datefmt='%Y-%m-%d %H:%M:%S'
[formatter_verbose]
format=[%(asctime)s] %(levelname)s (%(process)d) [%(pathname)s:%(lineno)d] %(message)s %(stack_info)s
# List of loggers, handlers and formatters:
[loggers]
keys=root
[handlers]
keys=consoleHandler
[formatters]
keys=generic, verbose
# loggers:
[logger_root]
level=INFO
handlers=consoleHandler
# handlers:
[handler_consoleHandler]
class=StreamHandler
level=INFO
formatter=generic
args=(sys.stdout,)
# formatters:
[formatter_generic]
#format=[%(asctime)s] %(levelname)s (%(process)d) [%(module)s:%(lineno)d] %(message)s
format=[%(asctime)s] %(levelname)s (%(process)d) [%(pathname)s:%(lineno)d] %(message)s
# datefmt='%Y-%m-%d %H:%M:%S'
[formatter_verbose]
format=[%(asctime)s] %(levelname)s (%(process)d) [%(pathname)s:%(lineno)d] %(message)s %(stack_info)s
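A minimal sketch of how one of these INI files is consumed (the filename "logging.ini" is a placeholder; the service derives the real name from the RUNMODE environment variable, as the start_timeseries module below shows):

import logging
import logging.config

# Placeholder filename; in the service it is built as "logging_config_<RUNMODE>.ini".
logging.config.fileConfig("logging.ini")

# With the DEBUG variant both lines appear; with the INFO variant only the second,
# rendered by the "generic" formatter, e.g.
# [2020-01-01 12:00:00,000] INFO (1234) [/app/example.py:10] service started
logging.debug("verbose diagnostic message")
logging.info("service started")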
import logging
import time
from tempfile import NamedTemporaryFile
from flask import make_response
from obspy.clients.fdsn import Client
from obspy.core import UTCDateTime
from obspy.signal.filter import envelope
from apps.globals import Error
from apps.globals import FDSN_CLIENT
from apps.globals import FROM_CLIENT
from apps.globals import MAX_DATA_POINTS
from apps.globals import MAX_DATA_POINTS_PROCESSING
from apps.globals import MAX_PLOTS
from apps.globals import USER_AGENT_TIMESERIES_INVENTORY
from apps.utils import error_500
from apps.utils import get_bounds
from apps.utils import get_signal
from apps.utils import get_signal_from_client
from apps.utils import get_periodogram
from apps.utils import get_response
from apps.utils import nodata_error
from apps.utils import overflow_error
from apps.utils import remove_response
from apps.utils import static_plots
from apps.utils import tictac
def get_processed_signal(st, params):
"""Signal processing """
tic = time.time()
for n, tr in enumerate(st):
logging.debug(f"Processing trace {n}...")
for item in params["request"]:
            # Processing steps are applied only once each, in the order given by the list params["request"].
            # To do this with boolean parameters (e.g. demean) we check that the flag is both true and
            # present in the request (see the illustrative note after this function).
if params["earthunits"] and item in ("earthunits", "correct"):
remove_response(tr, params)
elif params["demean"] and item == "demean":
tr.detrend("demean")
logging.debug("demean")
elif params["detrend"] and item == "detrend":
tr.detrend("linear")
logging.debug("detrend")
elif params["envelope"] and item == "envelope":
tr.data = envelope(tr.data)
logging.debug("envelope")
elif params["diff"] and item == "diff":
tr.differentiate(method="gradient")
logging.debug("diff (method=gradient)")
elif params["int"] and item == "int":
tr.integrate(method="cumtrapz")
logging.debug("int (method=cumtrapz)")
elif item == "scale":
tr.data = params["scale"] * tr.data
logging.debug(f"scale: {params['scale']}")
elif item == "divscale":
tr.data = (1.0 / params["divscale"]) * tr.data
logging.debug(f"divscale: {params['divscale']}")
elif item in ("decimate", "deci"):
tr.decimate(params["deci"], strict_length=False, no_filter=False)
logging.debug("deci (strict_length=False, no_filter=False)")
elif item == "taper":
taper_trace(tr, params)
elif item in ("lpfilter", "lp"):
tr.filter("lowpass", freq=params["lp"], zerophase=params["zerophase"])
logging.debug(f"lp: {params['lp']}, zerophase={params['zerophase']}")
elif item in ("hpfilter", "hp"):
tr.filter("highpass", freq=params["hp"], zerophase=params["zerophase"])
logging.debug(f"hp: {params['hp']}, zerophase={params['zerophase']}")
elif item in ("bpfilter", "bp"):
tr.filter(
"bandpass",
freqmin=params["bp"][0],
freqmax=params["bp"][1],
zerophase=params["zerophase"],
)
logging.debug(f"bp: {params['bp']}, zerophase={params['zerophase']}")
if params["spectrum"]:
for n, tr in enumerate(st):
tr.time_array, tr.data = get_periodogram(tr.data)
logging.info(f"Processed signal in {tictac(tic)} seconds.")
return st
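# Illustrative note (not part of the original module): the chain above is driven by
# params["request"], the list of requested operation names (presumably in the order
# the parameters appear in the query string). For a hypothetical request containing
#   ...&demean=true&lp=1.0&scale=2
# params["request"] would be roughly ["demean", "lp", "scale"], so the trace is
# demeaned first, then low-pass filtered at 1.0 Hz, then multiplied by 2.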
def taper_trace(tr, params):
win = "hann" if params["taper"][1] == "HANNING" else params["taper"][1].lower()
tr.taper(params["taper"][0], type=win, max_length=None, side="both")
msg = f"Taper trace : max_percentage={params['taper'][0]}, type={win}"
logging.debug(msg)
def get_file_type(params):
# (time, values) 2 columns
if params["format"] in ("ascii", "tspair"):
file_type, file_ext = "TSPAIR", ".csv"
# (values) written from left to right (6 columns max)
elif params["format"] == "slist":
file_type, file_ext = "SLIST", ".csv"
elif params["format"] in ("miniseed", "mseed"):
file_type, file_ext = "MSEED", ".mseed"
# little-endian SAC
elif params["format"] == "sac":
file_type, file_ext = "SAC", ".sac"
return (file_type, file_ext)
def set_sac_header(params, st):
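    """Populate SAC header fields (cmpaz/cmpinc from the channel azimuth and dip,
    stla/stlo/stdp/stel from the station metadata) using the FDSN station service."""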
try:
for tr in st:
stats = tr.stats
client = Client(FDSN_CLIENT, user_agent=USER_AGENT_TIMESERIES_INVENTORY)
inventory = client.get_stations(
network=stats["network"],
station=stats["station"],
location=stats["location"],
channel=stats["channel"],
level="channel",
)
inv_sta = inventory[0][0]
inv_cha = inv_sta[0]
stats.sac = {}
if hasattr(inv_cha, "azimuth"):
stats.sac["cmpaz"] = inv_cha.azimuth
if hasattr(inv_cha, "dip"):
stats.sac["cmpinc"] = inv_cha.dip
if hasattr(inv_sta, "latitude"):
stats.sac["stla"] = inv_sta.latitude
if hasattr(inv_sta, "longitude"):
stats.sac["stlo"] = inv_sta.longitude
if hasattr(inv_sta, "depth"):
stats.sac["stdp"] = inv_sta.depth
if hasattr(inv_sta, "elevation"):
stats.sac["stel"] = inv_sta.elevation
except Exception as ex:
logging.exception(str(ex))
return error_500(Error.UNSPECIFIED)
def get_file(params, st):
"""Create temporary timeseries file.
The name is built according to the template :
resifws-timeseries.2018-11-29T10_11_32.000Z.2018-11-29T23_42_56.000Z
:param params: Parameters object with url parameters (network, station, ...)
:param st: obspy stream
:returns: response_class flask object containing the file
"""
    tic = time.time()
    tmp = None
    try:
(file_type, file_ext) = get_file_type(params)
(start, end) = get_bounds(st)
start = UTCDateTime(start).strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
end = UTCDateTime(end).strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
period = ".".join([start, end])
fname = "resifws-timeseries"
if len(st) == 1:
stat = st[0].stats
fname = ".".join([stat.network, stat.station, stat.location, stat.channel])
fname = fname + "." + period + file_ext
headers = {"Content-Disposition": "attachment; filename=" + fname}
tmp = NamedTemporaryFile(delete=True)
if file_ext == ".sac":
set_sac_header(params, st)
st.write(tmp.name, format=file_type)
response = make_response((tmp.read(), headers))
if file_ext == ".csv":
response.mimetype = "text/csv"
else:
response.mimetype = "application/octet-stream"
return response
except Exception as ex:
logging.exception(str(ex))
return error_500(Error.UNSPECIFIED)
    finally:
        if tmp is not None:
            tmp.close()
logging.info(f"Response with file created in {tictac(tic)} seconds.")
def get_output(params):
"""Create timeseries plots.
:param params: Parameters object with url parameters (network, station, ...)
:returns: static plot(s) or data file
:raises MemoryError raises memory exception
:raises ValueError raises value exception
"""
try:
tic0 = time.time()
st = None
response = None
tic1 = time.time()
if FROM_CLIENT:
st = get_signal_from_client(params)
else:
st = get_signal(params)
logging.info(f"Get data in {tictac(tic1)} seconds.")
if st is None or len(st) == 0:
return None
npoints = sum([len(tr.data) for tr in st])
if params["format"] == "plot":
if npoints > MAX_DATA_POINTS:
return overflow_error(Error.TOO_MUCH_DATA)
if npoints > MAX_DATA_POINTS_PROCESSING and params["earthunits"]:
return overflow_error(Error.TOO_MUCH_DATA_PROCESSING)
if len(st) > MAX_PLOTS:
return overflow_error(Error.PLOTS)
if params["earthunits"]:
tic1 = time.time()
try:
st.attach_response(get_response(params))
except Exception as ex:
logging.debug(str(ex))
return nodata_error(Error.RESPONSE)
logging.info(f"Attach response in {tictac(tic1)} seconds.")
st = get_processed_signal(st, params)
if params["format"] == "plot":
response = static_plots(params, st)
else:
response = get_file(params, st)
return response
except (MemoryError, ValueError) as plotex:
logging.exception(str(plotex))
return overflow_error(Error.PROCESSING)
except Exception as ex:
logging.exception(str(ex))
return error_500(Error.UNSPECIFIED)
finally:
if st:
delta = params["end"] - params["start"]
logging.debug(f"Period of {delta} with {npoints} data points.")
if response:
            nbytes = response.headers.get("Content-Length")
            logging.info(f"{nbytes} bytes rendered in {tictac(tic0)} seconds.")
import logging
import re
import time
from bokeh.embed import file_html
from bokeh.layouts import column
from bokeh.models import DatetimeTickFormatter, HoverTool
from bokeh.plotting import figure
from bokeh.resources import CDN
from flask import make_response
from apps.globals import Error
from apps.globals import FROM_CLIENT
from apps.globals import MAX_DATA_POINTS
from apps.globals import MAX_DATA_POINTS_PROCESSING
from apps.globals import MAX_PLOTS
from apps.utils import error_500
from apps.utils import get_bounds
from apps.utils import get_signal
from apps.utils import get_signal_from_client
from apps.utils import get_periodogram
from apps.utils import get_response
from apps.utils import get_units
from apps.utils import nodata_error
from apps.utils import overflow_error
from apps.utils import remove_response
from apps.utils import static_plots
from apps.utils import tictac
def get_processed_signal(st, params):
"""Signal processing """
tic = time.time()
for n, tr in enumerate(st):
logging.debug(f"Processing trace {n}...")
if params["earthunits"]:
remove_response(tr, params)
elif params["demean"]:
tr.detrend("demean")
logging.debug("demean")
if params["spectrum"]:
for n, tr in enumerate(st):
tr.time_array, tr.data = get_periodogram(tr.data)
logging.info(f"Processed signal in {tictac(tic)} seconds.")
return st
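# Note: unlike the full processing chain in the timeseries service above, the plot
# service either removes the instrument response (earthunits) or demeans the trace,
# plus the optional spectrum computation.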
def date_tick_formatter():
"""Create an xaxis formatter according to different date and time scales."""
return DatetimeTickFormatter(
# microseconds=["%fus "]
# milliseconds=["%3Nms ", "%S.%3Ns"]
milliseconds=["%F ", "%T"],
seconds=["%F ", "%T"],
minsec=["%F ", "%T"],
minutes=["%F ", "%T"],
hourmin=["%F ", "%T"],
hours=["%F ", "%T"],
days=["%F ", "%T"],
months=["%F ", "%T"],
years=["%F ", "%T"],
)
# from bokeh.models import FuncTickFormatter
# return FuncTickFormatter(code="""
# var date = new Date(tick*1000).toISOString()
# if (index == 0) {
# return(date.replace("Z", ""));
# } else {
# return(date.split("T")[1].replace("Z", ""));
# } """)
def dynamic_plots(params, st):
"""Create dynamic timeseries plots.
    This function returns dynamic timeseries plots built with Bokeh and embedded
    into HTML via the Flask renderer.
:param params: Parameters object with url parameters (network, station, ...)
:returns: response_class flask object containing dynamic timeseries plot(s)
"""
tic = time.time()
plots = list()
(left, right) = get_bounds(st)
pcolor = "black" if params["monochrome"] else "#" + params["color"]
for tr in st:
# define tools and tooltips
units = get_units(params, tr.stats.channel)
tooltips = [("Amplitude", "@y" + units), ("Date", "@x{%F %T.%3N}")]
hover = HoverTool(tooltips=tooltips, formatters={"@x": "datetime"})
tools = "crosshair, save, pan, wheel_zoom, box_zoom, zoom_in, zoom_out, reset"
# create a new plot with the tools above
plot = figure(
tools=[hover, tools],
plot_width=params["width"],
plot_height=params["height"],
x_range=(1000 * left, 1000 * right),
active_drag="box_zoom",
# active_scroll="wheel_zoom",
)
plot.toolbar.logo = None
plot.xaxis[0].ticker.desired_num_ticks = 4
plot.xaxis[0].formatter = date_tick_formatter()
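        # Bokeh datetime axes expect milliseconds since the epoch, while ObsPy
        # timestamps are in seconds, hence the factor of 1000 here and in x_range.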
plot.line([t * 1000 for t in tr.times("timestamp")], tr.data, color=pcolor)
text = (tr.stats.network, tr.stats.station, tr.stats.location, tr.stats.channel)
if params["showtitle"]:
plot.title.text = "[ %s_%s_%s_%s ]" % text
plots.append(plot)
# Puts the result in a column.
plots = column(plots)
# Generate a complete HTML page embedding the Bokeh plot.
html = file_html(plots, CDN)
html = re.sub(r"<title>.*</title>", "<title>resifws-timeseriesplot</title>", html)
# Returns the rendered HTML to the browser.
logging.info(f"Response with dynamic plot created in {tictac(tic)} seconds.")
return make_response(html)
def get_output(params):
"""Create timeseries plots.
    :param params: Parameters object with URL parameters (network, station, ...)
    :returns: static or dynamic plot(s)
    :raises MemoryError: if processing runs out of memory
    :raises ValueError: if processing fails on invalid values
    """
try:
tic0 = time.time()
st = None
response = None
tic1 = time.time()
if FROM_CLIENT:
st = get_signal_from_client(params)
else:
st = get_signal(params)
logging.info(f"Get data in {tictac(tic1)} seconds.")
if st is None or len(st) == 0:
return None
npoints = sum([len(tr.data) for tr in st])
if npoints > MAX_DATA_POINTS:
return overflow_error(Error.TOO_MUCH_DATA)
if npoints > MAX_DATA_POINTS_PROCESSING and params["earthunits"]:
return overflow_error(Error.TOO_MUCH_DATA_PROCESSING)
if len(st) > MAX_PLOTS:
return overflow_error(Error.PLOTS)
if params["earthunits"]:
tic1 = time.time()
try:
st.attach_response(get_response(params))
except Exception as ex:
logging.debug(str(ex))
return nodata_error(Error.RESPONSE)
logging.info(f"Attach response in {tictac(tic1)} seconds.")
st = get_processed_signal(st, params)
if params["iplot"]:
response = dynamic_plots(params, st)
else:
response = static_plots(params, st)
return response
except (MemoryError, ValueError) as plotex:
logging.exception(str(plotex))
return overflow_error(Error.PROCESSING)
except Exception as ex:
logging.exception(str(ex))
return error_500(Error.UNSPECIFIED)
finally:
if st:
delta = params["end"] - params["start"]
logging.debug(f"Period of {delta} with {npoints} data points.")
if response:
            nbytes = response.headers.get("Content-Length")
            logging.info(f"{nbytes} bytes rendered in {tictac(tic0)} seconds.")
import logging.config
import os
# Configure logging.
try:
    RUNMODE = os.environ["RUNMODE"]
except KeyError as ex:
    print(f"Missing environment variable: {ex}")
    raise
CONFIG_FILE = "logging_config_" + str(RUNMODE) + ".ini"
FILE_DIR = os.path.dirname(__file__)
try:
os.makedirs(FILE_DIR + "/logs")
except (FileExistsError, FileNotFoundError):
pass
logging.config.fileConfig(os.path.join(FILE_DIR, "apps", CONFIG_FILE))
logging.debug("RUNMODE variable successfully loaded: RUNMODE = %s " % RUNMODE)
logging.debug("Logs directory successfully created in: %s" % FILE_DIR)