# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved.                     #
# This file is part of the AiiDA code.                                    #
#                                                                         #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file        #
# For further information please visit http://www.aiida.net               #
###########################################################################
"""Module for all logging methods/classes that don't need the ORM."""
from __future__ import annotations

import collections
import contextlib
import enum
import io
import logging
import types
import typing as t

__all__ = ('AIIDA_LOGGER', 'override_log_level')
# Custom logging level, intended specifically for informative log messages reported during WorkChains.
# We want a level between INFO (20) and WARNING (30) such that it is still emitted at the default loglevel; the
# value 25 is avoided because it is already reserved for SUBWARNING by the multiprocessing module.
LOG_LEVEL_REPORT = 23
# Add the custom log level to the :mod:`logging` module and add a corresponding report logging method.
logging.addLevelName(LOG_LEVEL_REPORT, 'REPORT')


def report(self: logging.Logger, msg, *args, **kwargs):
    """Log a message at the ``REPORT`` level."""
    self.log(LOG_LEVEL_REPORT, msg, *args, **kwargs)
class AiidaLoggerType(logging.Logger):
    def report(self, msg: str, *args, **kwargs) -> None:
        """Log a message at the ``REPORT`` level."""

setattr(logging, 'REPORT', LOG_LEVEL_REPORT)
setattr(logging.Logger, 'report', report)
setattr(logging.LoggerAdapter, 'report', report)
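
# Illustrative sketch (not executed here): after this module has been imported, any ``logging.Logger``
# or ``logging.LoggerAdapter`` exposes ``report``, which logs at level 23 (REPORT), between INFO (20)
# and WARNING (30). The child logger name below is hypothetical.
#
#     from aiida.common.log import AIIDA_LOGGER
#     logger = AIIDA_LOGGER.getChild('my_workchain')
#     logger.report('completed step %d of %d', 1, 3)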
# Convenience dictionary of available log level names and their log level integer
LOG_LEVELS = {
    logging.getLevelName(logging.NOTSET): logging.NOTSET,
    logging.getLevelName(logging.DEBUG): logging.DEBUG,
    logging.getLevelName(logging.INFO): logging.INFO,
    logging.getLevelName(LOG_LEVEL_REPORT): LOG_LEVEL_REPORT,
    logging.getLevelName(logging.WARNING): logging.WARNING,
    logging.getLevelName(logging.ERROR): logging.ERROR,
    logging.getLevelName(logging.CRITICAL): logging.CRITICAL,
}
LogLevels = enum.Enum('LogLevels', {key: key for key in LOG_LEVELS}) # type: ignore[misc]
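
# Illustrative sketch (not executed here): ``LOG_LEVELS`` maps level names to their integer values and
# ``LogLevels`` is the matching enum of valid names, useful e.g. for validating a user-supplied
# verbosity string.
#
#     assert LOG_LEVELS['REPORT'] == LOG_LEVEL_REPORT
#     assert LogLevels('REPORT').name == 'REPORT'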
AIIDA_LOGGER = t.cast(AiidaLoggerType, logging.getLogger('aiida'))
CLI_ACTIVE: bool | None = None
"""Flag that is set to ``True`` when this module is imported as part of a ``verdi`` call."""

CLI_LOG_LEVEL: str | None = None
"""Set to the corresponding log level name when ``verdi`` is called with the ``--verbosity`` flag."""

# The default logging dictionary for AiiDA that can be used in conjunction
# with the ``dictConfig`` method of Python's ``logging.config`` module.
def get_logging_config():
    """Return the default logging configuration for AiiDA.

    Log levels that depend on the active profile are wrapped in lambdas so that they are only evaluated when the
    configuration is actually applied (see :func:`evaluate_logging_configuration`).
    """
    from aiida.manage.configuration import get_config_option

    return {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'verbose': {
                'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s',
            },
            'halfverbose': {
                'format': '%(asctime)s <%(process)d> %(name)s: [%(levelname)s] %(message)s',
                'datefmt': '%m/%d/%Y %I:%M:%S %p',
            },
            'cli': {
                'class': 'aiida.cmdline.utils.log.CliFormatter',
            },
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'formatter': 'halfverbose',
            },
            'cli': {
                'class': 'aiida.cmdline.utils.log.CliHandler',
                'formatter': 'cli',
            },
        },
        'loggers': {
            'aiida': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.aiida_loglevel'),
                'propagate': True,
            },
            'verdi': {
                'handlers': ['cli'],
                'level': lambda: get_config_option('logging.verdi_loglevel'),
                'propagate': False,
            },
            'plumpy': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.plumpy_loglevel'),
                'propagate': False,
            },
            'kiwipy': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.kiwipy_loglevel'),
                'propagate': False,
            },
            'paramiko': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.paramiko_loglevel'),
                'propagate': False,
            },
            'alembic': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.alembic_loglevel'),
                'propagate': False,
            },
            'aio_pika': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.aiopika_loglevel'),
                'propagate': False,
            },
            'sqlalchemy': {
                'handlers': ['console'],
                'level': lambda: get_config_option('logging.sqlalchemy_loglevel'),
                'propagate': False,
                'qualname': 'sqlalchemy.engine',
            },
            'py.warnings': {
                'handlers': ['console'],
            },
        },
    }
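
# Note (illustrative sketch, not executed here): because the 'level' entries above are zero-argument
# lambdas rather than plain strings, the profile-dependent options are only read when the configuration
# is evaluated, which requires a loaded AiiDA profile:
#
#     get_logging_config()['loggers']['aiida']['level']()  # e.g. 'REPORT', depending on the profile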
def evaluate_logging_configuration(dictionary):
    """Recursively evaluate the logging configuration, calling lambdas when encountered.

    This allows the configuration options that are dependent on the active profile to be loaded lazily.

    :return: evaluated logging configuration dictionary
    """
    result = {}

    for key, value in dictionary.items():
        if isinstance(value, collections.abc.Mapping):
            result[key] = evaluate_logging_configuration(value)
        elif isinstance(value, types.LambdaType):  # pylint: disable=no-member
            result[key] = value()
        else:
            result[key] = value

    return result
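
# Illustrative sketch (not executed here): one plausible way to apply the default configuration.
# The lambdas must be resolved first, since ``logging.config.dictConfig`` expects plain values.
#
#     import logging.config
#     logging.config.dictConfig(evaluate_logging_configuration(get_logging_config()))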

@contextlib.contextmanager
def override_log_level(level=logging.CRITICAL):
    """Temporarily disable logging of all messages at or below ``level`` (``CRITICAL`` by default)."""
    logging.disable(level=level)
    try:
        yield
    finally:
        logging.disable(level=logging.NOTSET)
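
# Illustrative sketch (not executed here): suppress all messages up to and including CRITICAL while
# running a noisy block of code; ``noisy_operation`` is a hypothetical callable.
#
#     with override_log_level():
#         noisy_operation()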

@contextlib.contextmanager
def capture_logging(logger: logging.Logger = AIIDA_LOGGER) -> t.Generator[io.StringIO, None, None]:
    """Capture logging to a stream in memory.

    Note, this only copies any content that is being logged to a stream in memory. It does not interfere with any
    other existing stream handlers. In this sense, this context manager is non-destructive.

    :param logger: The logger whose output to capture.
    :returns: A stream to which the logged content is captured.
    """
    stream = io.StringIO()
    handler = logging.StreamHandler(stream)
    logger.addHandler(handler)

    try:
        yield stream
    finally:
        logger.removeHandler(handler)
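
# Illustrative sketch (not executed here): capture what a logger emits without disturbing its existing
# handlers; the captured text remains available after the context exits.
#
#     with capture_logging(AIIDA_LOGGER) as stream:
#         AIIDA_LOGGER.warning('something happened')
#     assert 'something happened' in stream.getvalue()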