Commit cf83de02 authored by Jonathan Schaeffer

Dockerise application

parent 5ce9cb3f
stages:
- deploy
- build
variables:
GIT_SUBMODULE_STRATEGY: recursive
deploy test:
stage: deploy
@@ -12,3 +12,24 @@ deploy test:
- rsync -a --exclude .git* $CI_PROJECT_DIR /srv/deploy/
- docker restart ws-timeseries
- docker restart ws-timeseriesplot
build:
stage: build
image: gricad-registry.univ-grenoble-alpes.fr/kubernetes-alpes/buildah:latest
tags:
- dind
variables:
REGISTRY_LOGIN: buildah login -u gitlab-ci-token -p $CI_REGISTRY_PASSWORD
REGISTRY_LOGOUT: buildah logout
IMAGE_BUILD: buildah build-using-dockerfile --storage-driver vfs --format docker
IMAGE_PUSH: buildah push --storage-driver vfs
DOCKERFILE: Dockerfile
IMAGE_NAME: $CI_REGISTRY_IMAGE/ws-timeseries:$CI_COMMIT_SHORT_SHA
script:
- $REGISTRY_LOGIN $CI_REGISTRY
- $IMAGE_BUILD --file $DOCKERFILE $BUILD_ARG --tag $IMAGE_NAME .
- $IMAGE_PUSH $IMAGE_NAME $IMAGE_NAME
- $REGISTRY_LOGOUT $CI_REGISTRY
FROM python:3.8-slim
# pip already ships with the python:3.8-slim base image, so no extra apt packages are needed
# Install the Flask application
COPY requirements.txt /
RUN pip install --no-cache-dir numpy
RUN pip install --no-cache-dir -r /requirements.txt
RUN pip install --no-cache-dir gunicorn
WORKDIR /app
COPY start*.py log_init.py ./
COPY apps ./apps/
COPY templates ./templates/
COPY static ./static/
ENTRYPOINT ["gunicorn"]
CMD ["start_timeseries:app"]
# Timeseries and timeseries plot webservice
## Prerequisites
## Running timeseries
```
docker run --rm -p 8000:8000 \
-e PGPASSWORD=________ \
-e PG_DBURI=postgresql://role@host:5432/database \
-e RUNMODE=dev \
-e GUNICORN_CMD_ARGS="--bind 0.0.0.0" \
--name timeseries \
gricad-registry.univ-grenoble-alpes.fr/osug/resif/ws-timeseries/ws-timeseries start_timeseries:app
```
## Running timeseriesplot
```
docker run --rm -p 8000:8000 \
-e PGPASSWORD=________ \
-e PG_DBURI=postgresql://role@host:5432/database \
-e RUNMODE=dev \
-e GUNICORN_CMD_ARGS="--bind 0.0.0.0" \
--name timeseriesplot \
gricad-registry.univ-grenoble-alpes.fr/osug/resif/ws-timeseries/ws-timeseries start_timeseriesplot:app
```
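To check that a container started with one of the commands above is actually answering on port 8000, here is a minimal smoke test (a sketch under assumptions: only the root URL is probed, since the concrete endpoint paths are not shown here):

```
# Assumes a container from the examples above is running and publishing port
# 8000 on localhost.  Any HTTP response, even a 404, proves gunicorn is up.
import urllib.error
import urllib.request

try:
    with urllib.request.urlopen("http://localhost:8000/", timeout=5) as resp:
        print("service reachable, HTTP status:", resp.status)
except urllib.error.HTTPError as err:
    print("service reachable, HTTP status:", err.code)
except (urllib.error.URLError, OSError) as err:
    print("service not reachable:", err)
```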
@@ -6,7 +6,6 @@ import os
import re
import sys
import traceback
import psycopg2
@@ -15,16 +14,6 @@ levels = [logging.CRITICAL, logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
## valid request parameters
url_keys = ("network", "station", "channel", "location", "net", "sta", "cha", "loc")
def database_config():
config = dict()
parser = configparser.ConfigParser()
parser.read("database_config.ini")
for name in parser.options("postgresql"):
config.update({name: parser.get("postgresql", name)})
return config
def request_parser(params, url):
for pairs in re.findall(r"[\w]+=[\w?*-]+", url):
logging.debug(pairs)
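The context above shows that request parameters are extracted from the raw URL with a single regular expression; a small illustration of what that pattern matches, using made-up FDSN-style selector values:

```
import re

# Made-up query string; the value class keeps word characters, '?', '*' and
# '-', which covers network/station/channel/location selectors and dates.
url = "net=FR&sta=CIEL&cha=HH?&loc=*&start=2020-01-01"
print(re.findall(r"[\w]+=[\w?*-]+", url))
# ['net=FR', 'sta=CIEL', 'cha=HH?', 'loc=*', 'start=2020-01-01']
```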
@@ -74,8 +63,7 @@ def collect_data(params):
conn = None
try:
logging.debug("Try to connect to the RESIF database.")
conf = database_config() # read connection parameters
conn = psycopg2.connect(**conf) # connect to the RESIF database
conn = psycopg2.connect(os.getenv('PG_DBURI')) # connect to the RESIF database using environment variable
cursor = conn.cursor() # cursor to execute SQL command
logging.debug(conn.get_dsn_parameters())
logging.debug(f"Postgres version : {conn.server_version}")
......
# global constants
FROM_CLIENT = False
FDSN_CLIENT = "RESIF"
DATA_MOUNT_POINT = "/mnt/nfs/summer"
DATA_MOUNT_POINT = os.getenv("DATADIR")
USER_AGENT_TIMESERIES = "resifws-timeseries"
USER_AGENT_TIMESERIES_INVENTORY = "resifws-timeseries_inventory"
USER_AGENT_TIMESERIESPLOT = "resifws-timeseriesplot"
......
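The hunks above replace the database_config.ini/configparser lookup with environment variables: PG_DBURI now carries the full PostgreSQL connection URI and DATADIR replaces the hard-coded /mnt/nfs/summer mount point, matching the -e options in the README. A minimal sketch of that pattern, assuming the same variable names (the fallback value is only illustrative):

```
import os

import psycopg2

# PG_DBURI is expected to hold a libpq connection URI such as
# postgresql://role@host:5432/database; the password comes from PGPASSWORD,
# which libpq reads from the environment on its own.
dsn = os.getenv("PG_DBURI")
if dsn is None:
    raise RuntimeError("PG_DBURI is not set")

# DATADIR replaces the hard-coded mount point; the default here is illustrative.
data_mount_point = os.getenv("DATADIR", "/mnt/nfs/summer")

conn = psycopg2.connect(dsn)
try:
    with conn.cursor() as cursor:
        cursor.execute("SELECT version()")
        print(cursor.fetchone()[0])
finally:
    conn.close()
```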
@@ -3,7 +3,7 @@
keys=root
[handlers]
keys=consoleHandler, fileHandler
keys=consoleHandler
[formatters]
keys=generic, verbose
@@ -12,7 +12,7 @@ keys=generic, verbose
# loggers:
[logger_root]
level=DEBUG
handlers=consoleHandler, fileHandler
handlers=consoleHandler
# handlers:
@@ -22,14 +22,6 @@ level=DEBUG
formatter=generic
args=(sys.stdout,)
[handler_fileHandler]
class=logging.handlers.TimedRotatingFileHandler
level=DEBUG
formatter=generic
#(filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None)
args=('logs/dev.log', 'd', 1, 2)
# formatters:
[formatter_generic]
#format=[%(asctime)s] %(levelname)s (%(process)d) [%(module)s:%(lineno)d] %(message)s
......
@@ -3,7 +3,7 @@
keys=root
[handlers]
keys=consoleHandler, fileHandler
keys=consoleHandler
[formatters]
keys=generic, verbose
@@ -11,24 +11,17 @@ keys=generic, verbose
# loggers:
[logger_root]
level=DEBUG
handlers=consoleHandler, fileHandler
level=INFO
handlers=consoleHandler
# handlers:
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
level=INFO
formatter=generic
args=(sys.stdout,)
[handler_fileHandler]
class=logging.handlers.TimedRotatingFileHandler
level=DEBUG
formatter=generic
#(filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None)
args=('logs/production.log', 'd', 1, 15, None, False, False)
# formatters:
[formatter_generic]
......
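Both logging configurations drop the TimedRotatingFileHandler so that everything goes to stdout via consoleHandler, which is what `docker logs` captures; files written under logs/ inside a container would be lost on restart anyway. A minimal sketch of how such an ini file is typically loaded with the standard library (the file name here is an assumption, since the real names are not visible in this view):

```
import logging
import logging.config

# "logging_config.ini" is a placeholder; point fileConfig at one of the
# ini files shown above.  With the fileHandler removed, the root logger only
# writes to stdout through consoleHandler.
logging.config.fileConfig("logging_config.ini", disable_existing_loggers=False)

logging.getLogger(__name__).info("logging goes to stdout via consoleHandler")
```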
@@ -404,8 +404,7 @@ def is_open_file(paths):
conn = None
try:
conf = database_config()
conn = psycopg2.connect(**conf)
conn = psycopg2.connect(os.getenv('PG_DBURI'))
cursor = conn.cursor()
logging.debug(conn.get_dsn_parameters())
values = ", ".join(f"""('{j}', '{k}')""" for j, k in paths)
......