Commit bb5bc352 authored by bourgesl

Merge branch 'release/2021_01_07'

parents 8122a2bd fd242e91
Pipeline #56203 skipped
#!/bin/bash
obsportal-cli database run-sql-script db/_clear_db.sql --settings=dev-local.ini
obsportal-cli database create-structure --skip_exist --debug --settings=dev-local.ini
obsportal-cli database init --debug --settings=dev-local.ini
echo "That's all folks !!!"
......@@ -82,7 +82,7 @@ listen =
##https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
#################################################
[loggers]
keys = root, obsportal, waitress, sqlalchemy, sqlalchemy_pool, alembic, urllib
keys = root, obsportal, obsportal_query_cache, waitress, sqlalchemy, sqlalchemy_pool, alembic, urllib
[handlers]
keys = console, file
......@@ -99,6 +99,11 @@ level = INFO
handlers =
qualname = obsportal
[logger_obsportal_query_cache]
level = INFO
handlers =
qualname = obsportal.model.utils
[logger_sqlalchemy]
level = WARN
handlers =
......@@ -108,7 +113,7 @@ qualname = sqlalchemy.engine
# "level = WARN" logs neither. (Recommended for production systems.)
[logger_sqlalchemy_pool]
level = INFO
level = WARN
handlers =
qualname = sqlalchemy.pool
......
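Note: the new [logger_obsportal_query_cache] section routes messages from the query-cache code through Python's hierarchical logger names. A minimal sketch, assuming a module that lives under obsportal.model.utils (the exact module name is hypothetical):

import logging

# Any logger named below obsportal.model.utils inherits the level set by the
# [logger_obsportal_query_cache] section: INFO in production, DEBUG in development.
logger = logging.getLogger('obsportal.model.utils.query_cache')  # hypothetical module name
logger.info("query cache statistics ...")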
......@@ -87,7 +87,7 @@ listen = *:6543
##https://docs.pylonsproject.org/projects/pyramid/en/latest/narr/logging.html
#################################################
[loggers]
keys = root, obsportal, waitress, sqlalchemy, sqlalchemy_pool, urllib
keys = root, obsportal, obsportal_query_cache, waitress, sqlalchemy, sqlalchemy_pool, alembic, urllib
[handlers]
keys = console, file
......@@ -104,6 +104,11 @@ level = DEBUG
handlers =
qualname = obsportal
[logger_obsportal_query_cache]
level = DEBUG
handlers =
qualname = obsportal.model.utils
[logger_sqlalchemy]
level = INFO
handlers =
......@@ -117,6 +122,11 @@ level = INFO
handlers =
qualname = sqlalchemy.pool
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[logger_waitress]
level = INFO
handlers =
......
doc/obsdb.png: binary image updated (172 KB -> 178 KB)
......@@ -129,6 +129,8 @@ def create(encoding, template, structure, settings, log, debug, skip_exist):
config = Configurator(settings=app_settings)
# CHECK IF DB EXISTS
logger.info(f"database:create(): db url = '{engine.url}'")
if not skip_exist and database_exists(engine.url):
click.echo("ERROR: The database already exists")
else:
......
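Note: database_exists comes from the sqlalchemy-utils package. A minimal sketch of the check added above, assuming an engine built the same way as elsewhere in the CLI:

from sqlalchemy_utils import database_exists

# True when the database behind engine.url already exists, so create-structure
# refuses to proceed unless --skip_exist was given.
if database_exists(engine.url):
    click.echo("ERROR: The database already exists")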
......@@ -26,7 +26,7 @@ def votable_exposure_search(settings, log, debug):
# Init
before_command(config_uri, log, debug)
logger.info(f"profile:votable_exposure_search()")
logger.info("profile:votable_exposure_search()")
env = bootstrap(config_uri)
#cp = cProfile.Profile()
......
......@@ -23,7 +23,7 @@ def cli(limit, settings, log, debug, check_lockfile):
# Init
before_command(config_uri, log, debug)
logger.info(f"synchronize:cli()")
logger.info("synchronize:cli()")
env = bootstrap(config_uri)
# Lock file
......
......@@ -2,7 +2,6 @@
import os
import click
import logging
import pytest
from obsportal.cli.tools import get_root_path
logger = logging.getLogger('obsportal.cli.eso')
......@@ -15,6 +14,7 @@ def cli():
pytest_args = [
f'-c{config_uri}'
]
import pytest
pytest.main(pytest_args)
else:
click.echo("Please configure the 'testing.ini' file.")
......@@ -24,14 +24,19 @@ configure_mappers()
def get_engine(settings, prefix='sqlalchemy.'):
engine = engine_from_config(settings, prefix)
logger.debug(f"get_engine(): {engine} | {engine.pool.status()}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"get_engine(): {engine} | {engine.pool.status()}")
return engine
def get_session_factory(engine):
factory = sessionmaker()
# disable auto-flush to minimize SQL queries:
factory = sessionmaker(autoflush=False)
factory.configure(bind=engine)
logger.debug(f"get_session_factory(): {factory}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"get_session_factory(): {factory}")
return factory
......
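Note: two recurring changes start here. Debug statements are wrapped in logger.isEnabledFor(logging.DEBUG) so that f-string formatting and expensive calls such as engine.pool.status() are only evaluated when DEBUG output is actually wanted, and the session factory disables autoflush to reduce implicit SQL. A minimal sketch of the guard pattern, with a hypothetical helper name:

import logging

logger = logging.getLogger(__name__)

def log_engine_status(engine):               # hypothetical helper
    # The guard skips both the f-string formatting and the pool.status() call
    # whenever the logger is configured above DEBUG.
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f"engine status: {engine.pool.status()}")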
......@@ -37,7 +37,10 @@ class DatabaseTag(Base):
@classmethod
def is_current_db_version(cls, db_session):
version = DatabaseTag.get_db_version(db_session)
logger.debug(f"is_current_db_version(): {version}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"is_current_db_version(): {version}")
if not version:
return False
if version != CurrentDatabaseTagVersion:
......@@ -49,14 +52,14 @@ class DatabaseTag(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
return db_session.query(cls).get(oid)
except:
return None
@classmethod
def list(cls, db_session):
try:
return db_session.query(cls).autoflush(False).all()
return db_session.query(cls).all()
except:
return None
......
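Note: with autoflush already disabled at the session-factory level, the per-query .autoflush(False) calls removed above were redundant; the plain Query API is enough. A short usage sketch with the names from this file:

tag = DatabaseTag.get(db_session, oid)       # oid: primary key of the tag to fetch (placeholder)
all_tags = DatabaseTag.list(db_session)      # every DatabaseTag, or None if the query fails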
......@@ -20,6 +20,7 @@ from sqlalchemy_jsonfield import JSONField
from sqlalchemy_utc import UtcDateTime
from .meta import Base
from .utils import QueryUtil
from .utils.xmatch import XMatch
logger = logging.getLogger(__name__)
......@@ -215,7 +216,7 @@ class Exposure(Base):
self.seeing if self.seeing else float_nan,
str(self.date_updated),
str(self.date_release),
str(self.date_release) if self.date_release else '',
# OBSERVATION
self.observation_id,
......@@ -413,7 +414,7 @@ class Exposure(Base):
('%.15g' % self.seeing) if self.seeing else str_nan,
str(self.date_updated),
str(self.date_release),
str(self.date_release) if self.date_release else '',
# OBSERVATION
self.observation_id,
......@@ -510,10 +511,7 @@ class Exposure(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGet(cls, db_session, oid) # No cache
@classmethod
def search(cls, db_session,
......@@ -624,7 +622,8 @@ class Exposure(Base):
query = query.order_by(Exposure.mjd_start.asc())
else:
if order_by_field:
logger.debug(f"order_by_field: {order_by_field} asc: {order_asc}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"order_by_field: {order_by_field} asc: {order_asc}")
if order_asc:
query = query.order_by(order_by_field.asc())
else:
......@@ -638,10 +637,13 @@ class Exposure(Base):
query = query.limit(maxrec)
# Execute query
logger.debug(f"Search(Exposure): {query}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"Search(Exposure): {query}")
if as_query:
return query.params(statement_params).yield_per(100) # process by chunks
results = query.params(statement_params).all()
logger.debug(f"Search(Exposure): {len(results)} results")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"Search(Exposure): {len(results)} results")
return results
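Note: two details in this hunk are worth spelling out. Guarding date_release avoids serialising a missing date as the literal string 'None' (str(None) == 'None'), and the streaming branch chunks results with yield_per(100). A hedged sketch, with the remaining search filters omitted and handle() purely hypothetical:

value = str(exposure.date_release) if exposure.date_release else ''   # '' instead of 'None'

# With as_query=True the search returns a lazily evaluated query; yield_per(100)
# fetches rows in batches of 100 instead of materialising everything at once.
for exp in Exposure.search(db_session, as_query=True):                # other filters omitted
    handle(exp)                                                        # handle() is hypothetical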
......@@ -18,6 +18,7 @@ from sqlalchemy_jsonfield import JSONField
from sqlalchemy_utc import UtcDateTime
from .meta import Base
from .utils import QueryUtil
logger = logging.getLogger(__name__)
......@@ -142,10 +143,7 @@ class Header(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGet(cls, db_session, oid) # No cache
@classmethod
def search(cls, db_session,
......@@ -183,9 +181,13 @@ class Header(Base):
query = query.limit(1000)
# Execute query
logger.debug(f"Search(Header): {query}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"Search(Header): {query}")
results = query.params(statement_params).all()
logger.debug(f"Search(Header): {len(results)} results")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"Search(Header): {len(results)} results")
return results
......
......@@ -13,6 +13,7 @@ from sqlalchemy import (
from sqlalchemy.orm import relationship
from .meta import Base
from .utils import QueryUtil
logger = logging.getLogger(__name__)
......@@ -45,14 +46,14 @@ class Instrument(Base):
@classmethod
def update_stats(cls, db_session):
logger.debug(f"update_stats()")
logger.debug("update_stats()")
cls._update_headers_count(db_session)
cls._update_observations_count(db_session)
cls._update_exposures_count(db_session)
@classmethod
def _update_headers_count(cls, db_session):
logger.debug(f"_update_headers_count()")
logger.debug("_update_headers_count()")
query = '''
UPDATE obsportal.instrument AS i
SET headers_count = (
......@@ -65,7 +66,7 @@ class Instrument(Base):
@classmethod
def _update_observations_count(cls, db_session):
logger.debug(f"_update_observations_count()")
logger.debug("_update_observations_count()")
query = '''
UPDATE obsportal.instrument AS i
SET observations_count = (
......@@ -78,7 +79,7 @@ class Instrument(Base):
@classmethod
def _update_exposures_count(cls, db_session):
logger.debug(f"_update_exposures_count()")
logger.debug("_update_exposures_count()")
query = '''
UPDATE obsportal.instrument AS i
SET exposures_count = (
......@@ -93,10 +94,7 @@ class Instrument(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGetCached(cls, db_session, oid)
class InstrumentMode(Base):
......@@ -126,13 +124,13 @@ class InstrumentMode(Base):
@classmethod
def update_stats(cls, db_session):
logger.debug(f"update_stats()")
logger.debug("update_stats()")
cls._update_observations_count(db_session)
cls._update_exposures_count(db_session)
@classmethod
def _update_observations_count(cls, db_session):
logger.debug(f"_update_observations_count()")
logger.debug("_update_observations_count()")
query = '''
UPDATE obsportal.instrument_mode AS im
SET observations_count = (
......@@ -145,7 +143,7 @@ class InstrumentMode(Base):
@classmethod
def _update_exposures_count(cls, db_session):
logger.debug(f"_update_exposures_count()")
logger.debug("_update_exposures_count()")
query = '''
UPDATE obsportal.instrument_mode AS im
SET exposures_count = (
......@@ -160,7 +158,5 @@ class InstrumentMode(Base):
@classmethod
def find(cls, db_session, instrument_id, name):
try:
return db_session.query(cls).autoflush(False).filter_by(instrument_id=instrument_id, name=name).one_or_none()
except:
return None
return QueryUtil.queryFilterCached(cls, db_session,
instrument_id=instrument_id, name=name)
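Note: QueryUtil (obsportal.model.utils) is not part of this diff, so the following is only a hypothetical sketch of what a queryGetCached-style helper could look like: a small in-memory map in front of the session, consistent with the 'obsportal_query_cache' logger added to the ini files. The real signature and behaviour may differ.

import logging

logger = logging.getLogger('obsportal.model.utils')

_cache = {}                                   # hypothetical: (class, key) -> instance

def queryGetCached(cls, db_session, oid):     # sketch only
    key = (cls, oid)
    if key in _cache:
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(f"cache hit: {key}")
        return _cache[key]
    try:
        obj = db_session.query(cls).get(oid)  # fall back to the session
    except Exception:
        return None
    _cache[key] = obj
    return obj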
......@@ -11,9 +11,10 @@ from sqlalchemy import (
from sqlalchemy.orm import relationship
from .meta import Base
from .utils import QueryUtil
logger = logging.getLogger(__name__)
# See https://www.eso.org/observing/etc/doc/viscalc/vltistations.html
class Interferometer(Base):
......@@ -43,14 +44,14 @@ class Interferometer(Base):
@classmethod
def update_stats(cls, db_session):
logger.debug(f"update_stats()")
logger.debug("update_stats()")
cls._update_headers_count(db_session)
cls._update_observations_count(db_session)
cls._update_exposures_count(db_session)
@classmethod
def _update_headers_count(cls, db_session):
logger.debug(f"_update_headers_count()")
logger.debug("_update_headers_count()")
query = '''
UPDATE obsportal.interferometer AS i
SET headers_count = (
......@@ -63,7 +64,7 @@ class Interferometer(Base):
@classmethod
def _update_observations_count(cls, db_session):
logger.debug(f"_update_observations_count()")
logger.debug("_update_observations_count()")
query = '''
UPDATE obsportal.interferometer AS i
SET observations_count = (
......@@ -76,7 +77,7 @@ class Interferometer(Base):
@classmethod
def _update_exposures_count(cls, db_session):
logger.debug(f"_update_exposures_count()")
logger.debug("_update_exposures_count()")
query = '''
UPDATE obsportal.interferometer AS i
SET exposures_count = (
......@@ -91,8 +92,4 @@ class Interferometer(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGetCached(cls, db_session, oid)
......@@ -19,6 +19,7 @@ from .enums.observation import EnumObservationCategory
from .enums.program import findProgramType
from .meta import Base
from .program import Program
from .utils import QueryUtil
from .utils.xmatch import XMatch
logger = logging.getLogger(__name__)
......@@ -27,7 +28,7 @@ logger = logging.getLogger(__name__)
class Observation(Base):
__tablename__ = 'observation'
__table_args__ = (
#Index('ix_obsportal_observation_ra_dec_spatial', text("GIST(spoint(radians(target_ra),radians(target_dec)))")),
# Index('ix_obsportal_observation_ra_dec_spatial', text("GIST(spoint(radians(target_ra),radians(target_dec)))")),
{'schema': 'obsportal'}
)
......@@ -64,7 +65,8 @@ class Observation(Base):
target = relationship('Target', back_populates='observations')
# EXPOSURES ########################################################################################################
exposures = relationship('Exposure', back_populates='observation', cascade='all, delete-orphan', passive_deletes=True)
exposures = relationship('Exposure', back_populates='observation', cascade='all, delete-orphan',
passive_deletes=True)
# STATS ############################################################################################################
......@@ -72,12 +74,12 @@ class Observation(Base):
@classmethod
def update_stats(cls, db_session):
logger.debug(f"update_stats()")
logger.debug("update_stats()")
cls._update_exposures_count(db_session)
@classmethod
def _update_exposures_count(cls, db_session):
logger.debug(f"_update_exposures_count()")
logger.debug("_update_exposures_count()")
query = '''
UPDATE obsportal.observation AS o
SET exposures_count = (
......@@ -139,10 +141,24 @@ class Observation(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGetCached(cls, db_session, oid)
@classmethod
def find(cls, db_session, interferometer=None, instrument=None, instrument_mode=None, instrument_submode=None,
target=None, stations=None, pops=None, category=None, program_id=None):
# Use most selective criteria first (target, program, instrument_mode):
return QueryUtil.queryFilterCached(cls, db_session,
target=target,
program_id=program_id,
instrument_mode=instrument_mode,
instrument=instrument,
instrument_submode=instrument_submode,
stations=stations,
pops=pops,
category=category,
interferometer=interferometer
) # see QueryUtil.flush
@classmethod
def factory(cls, db_session, attributes, category=None, program_id=None, target=None, interferometer=None,
......@@ -181,26 +197,19 @@ class Observation(Base):
observation.instrument_submode = instrument_submode
observation.target = target
return observation
# cache new observation to be queryable (memory) by observation.find() method:
QueryUtil.flush(cls, db_session, observation,
target=target,
program_id=program_id,
instrument_mode=instrument_mode,
instrument=instrument,
instrument_submode=instrument_submode,
stations=stations,
pops=pops,
category=category,
interferometer=interferometer) # see QueryUtil.queryFilterCached
@classmethod
def find(cls, db_session, interferometer=None, instrument=None, instrument_mode=None, instrument_submode=None,
target=None, stations=None, pops=None, category=None, program_id=None):
try:
# Use most selective criteria first (target, program, instrument_mode):
return db_session.query(cls).autoflush(False).filter_by(
target=target,
program_id=program_id,
instrument_mode=instrument_mode,
instrument=instrument,
instrument_submode=instrument_submode,
stations=stations,
pops=pops,
category=category,
interferometer=interferometer
).one_or_none()
except:
return None
return observation
@classmethod
def search(cls, db_session,
......@@ -267,7 +276,11 @@ class Observation(Base):
query = query.limit(1000)
# Execute query
logger.debug(f"Search(Observation): {query}")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"Search(Observation): {query}")
results = query.params(statement_params).all()
logger.debug(f"Search(Observation): {len(results)} results")
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f"Search(Observation): {len(results)} results")
return results
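Note: the refactored factory/find pair relies on QueryUtil.flush to register a freshly built Observation in the in-memory cache, so a later find() with the same criteria can be answered without another SELECT. A hedged usage sketch; all argument values are placeholders and only keywords visible in this diff are used:

obs = Observation.factory(db_session, attributes,            # attributes: dict of column values
                          category=category, program_id=program_id,
                          target=target, interferometer=interferometer)

# find() goes through QueryUtil.queryFilterCached; called with the same criteria that
# factory registered, it should return the cached instance rather than hit the database.
same = Observation.find(db_session, target=target, program_id=program_id,
                        interferometer=interferometer, category=category)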
......@@ -17,6 +17,7 @@ from .meta import Base
# See https://archive.eso.org/cms/eso-data/eso-programme-identification-code.html
from .utils import QueryUtil
logger = logging.getLogger(__name__)
......@@ -43,17 +44,17 @@ class Program(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGetCached(cls, db_session, oid)
@classmethod
def factory(cls, db_session, program_id, program_title, program_type, pi_coi, program=None):
is_new = False
if not program:
program = cls.get(db_session, program_id)
if program is None:
is_new = True
program = cls()
program.id = program_id
......@@ -63,4 +64,8 @@ class Program(Base):
program.type = program_type
program.pi_coi = pi_coi
if is_new:
# cache new program to be queryable (memory) by program.get() method:
QueryUtil.flush(cls, db_session, program)
return program
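Note: the same caching idea applies to Program: a new Program is flushed into the query cache so that Program.get() can resolve it from memory before anything is written to the database. Hedged sketch, assuming the cache keys get() lookups by primary key:

program = Program.factory(db_session, program_id, program_title, program_type, pi_coi)
cached = Program.get(db_session, program_id)   # served via QueryUtil.queryGetCached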
......@@ -15,6 +15,7 @@ from sqlalchemy import (
from sqlalchemy.orm import relationship
from .meta import Base
from .utils import QueryUtil
from .utils.xmatch import XMatch
logger = logging.getLogger(__name__)
......@@ -45,13 +46,13 @@ class Target(Base):
@classmethod
def update_stats(cls, db_session):
logger.debug(f"update_stats()")
logger.debug("update_stats()")
cls._update_observations_count(db_session)
cls._update_exposures_count(db_session)
@classmethod
def _update_observations_count(cls, db_session):
logger.debug(f"_update_observations_count()")
logger.debug("_update_observations_count()")
query = '''
UPDATE obsportal.target AS t
SET observations_count = (
......@@ -64,7 +65,7 @@ class Target(Base):
@classmethod
def _update_exposures_count(cls, db_session):
logger.debug(f"_update_exposures_count()")
logger.debug("_update_exposures_count()")
query = '''
UPDATE obsportal.target AS t
SET exposures_count = (
......@@ -110,10 +111,13 @@ class Target(Base):
@classmethod
def get(cls, db_session, oid):
try:
return db_session.query(cls).autoflush(False).get(oid)
except:
return None
return QueryUtil.queryGetCached(cls, db_session, oid)
@classmethod
def find(cls, db_session, name, ra,