authentik.lib.logging

Logging helpers: structlog configuration and stdlib logging dictConfig setup for authentik.

  1"""logging helpers"""
  2
  3import logging
  4from logging import Logger
  5from os import getpid
  6
  7import structlog
  8from django.db import connection
  9
 10from authentik.lib.config import CONFIG
 11
 12LOG_PRE_CHAIN = [
 13    # Add the log level and a timestamp to the event_dict if the log entry
 14    # is not from structlog.
 15    structlog.stdlib.add_log_level,
 16    structlog.stdlib.add_logger_name,
 17    structlog.processors.TimeStamper(fmt="iso", utc=False),
 18    structlog.processors.StackInfoRenderer(),
 19]
 20
 21
 22def get_log_level():
 23    """Get log level, clamp trace to debug"""
 24    level = CONFIG.get("log_level").upper()
 25    # We could add a custom level to stdlib logging and structlog, but it's not easy or clean
 26    # https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
 27    # Additionally, the entire code uses debug as highest level
 28    # so that would have to be re-written too
 29    if level == "TRACE":
 30        level = "DEBUG"
 31    return level
 32
 33
 34def structlog_configure():
 35    """Configure structlog itself"""
 36    structlog.configure_once(
 37        processors=[
 38            structlog.stdlib.add_log_level,
 39            structlog.stdlib.add_logger_name,
 40            structlog.contextvars.merge_contextvars,
 41            add_process_id,
 42            add_tenant_information,
 43            structlog.stdlib.PositionalArgumentsFormatter(),
 44            structlog.processors.TimeStamper(fmt="iso", utc=False),
 45            structlog.processors.StackInfoRenderer(),
 46            structlog.processors.ExceptionRenderer(
 47                structlog.tracebacks.ExceptionDictTransformer(show_locals=CONFIG.get_bool("debug"))
 48            ),
 49            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
 50        ],
 51        logger_factory=structlog.stdlib.LoggerFactory(),
 52        wrapper_class=structlog.make_filtering_bound_logger(
 53            getattr(logging, get_log_level(), logging.WARNING)
 54        ),
 55        cache_logger_on_first_use=True,
 56    )
 57
 58
 59def get_logger_config():
 60    """Configure python stdlib's logging"""
 61    debug = CONFIG.get_bool("debug")
 62    global_level = get_log_level()
 63    base_config = {
 64        "version": 1,
 65        "disable_existing_loggers": False,
 66        "formatters": {
 67            "json": {
 68                "()": structlog.stdlib.ProcessorFormatter,
 69                "processor": structlog.processors.JSONRenderer(sort_keys=True),
 70                "foreign_pre_chain": LOG_PRE_CHAIN
 71                + [
 72                    structlog.processors.ExceptionRenderer(
 73                        structlog.tracebacks.ExceptionDictTransformer(
 74                            show_locals=CONFIG.get_bool("debug")
 75                        )
 76                    ),
 77                ],
 78            },
 79            "console": {
 80                "()": structlog.stdlib.ProcessorFormatter,
 81                "processor": structlog.dev.ConsoleRenderer(colors=debug),
 82                "foreign_pre_chain": LOG_PRE_CHAIN,
 83            },
 84        },
 85        "handlers": {
 86            "console": {
 87                "level": "DEBUG",
 88                "class": "logging.StreamHandler",
 89                "formatter": "console" if debug else "json",
 90            },
 91        },
 92        "loggers": {},
 93    }
 94
 95    handler_level_map = {
 96        "": global_level,
 97        "authentik": global_level,
 98        "django": "WARNING",
 99        "django.request": "ERROR",
100        "selenium": "WARNING",
101        "docker": "WARNING",
102        "urllib3": "WARNING",
103        "websockets": "WARNING",
104        "daphne": "WARNING",
105        "kubernetes": "INFO",
106        "asyncio": "WARNING",
107        "fsevents": "WARNING",
108        "uvicorn": "WARNING",
109        "gunicorn": "INFO",
110        "requests_mock": "WARNING",
111        "hpack": "WARNING",
112        "httpx": "WARNING",
113        "azure": "WARNING",
114        "httpcore": "WARNING",
115    }
116    for handler_name, level in handler_level_map.items():
117        base_config["loggers"][handler_name] = {
118            "handlers": ["console"],
119            "level": level,
120            "propagate": False,
121        }
122    return base_config
123
124
def add_process_id(logger: Logger, method_name: str, event_dict):
    """structlog processor: stamp the event dict with the current OS process ID.

    Mutates *event_dict* in place and returns it, per the structlog
    processor contract.
    """
    event_dict.update(pid=getpid())
    return event_dict
130
def add_tenant_information(logger: Logger, method_name: str, event_dict):
    """structlog processor: attach the active tenant's schema to the event.

    Prefers the full tenant object on the DB connection (which also carries
    the domain URL); falls back to a bare ``schema_name`` attribute when no
    tenant object is set. Returns the (mutated) event dict.
    """
    tenant = getattr(connection, "tenant", None)
    if tenant is not None:
        event_dict["schema_name"] = tenant.schema_name
        # domain_url may be absent on the tenant object — record None then
        event_dict["domain_url"] = getattr(tenant, "domain_url", None)
    else:
        fallback_schema = getattr(connection, "schema_name", None)
        if fallback_schema is not None:
            event_dict["schema_name"] = fallback_schema
    return event_dict
LOG_PRE_CHAIN = [<function add_log_level>, <function add_logger_name>, <structlog.processors.TimeStamper object>, <structlog.processors.StackInfoRenderer object>]
def get_log_level():
23def get_log_level():
24    """Get log level, clamp trace to debug"""
25    level = CONFIG.get("log_level").upper()
26    # We could add a custom level to stdlib logging and structlog, but it's not easy or clean
27    # https://stackoverflow.com/questions/54505487/custom-log-level-not-working-with-structlog
28    # Additionally, the entire code uses debug as highest level
29    # so that would have to be re-written too
30    if level == "TRACE":
31        level = "DEBUG"
32    return level

Get log level, clamp trace to debug

def structlog_configure():
35def structlog_configure():
36    """Configure structlog itself"""
37    structlog.configure_once(
38        processors=[
39            structlog.stdlib.add_log_level,
40            structlog.stdlib.add_logger_name,
41            structlog.contextvars.merge_contextvars,
42            add_process_id,
43            add_tenant_information,
44            structlog.stdlib.PositionalArgumentsFormatter(),
45            structlog.processors.TimeStamper(fmt="iso", utc=False),
46            structlog.processors.StackInfoRenderer(),
47            structlog.processors.ExceptionRenderer(
48                structlog.tracebacks.ExceptionDictTransformer(show_locals=CONFIG.get_bool("debug"))
49            ),
50            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
51        ],
52        logger_factory=structlog.stdlib.LoggerFactory(),
53        wrapper_class=structlog.make_filtering_bound_logger(
54            getattr(logging, get_log_level(), logging.WARNING)
55        ),
56        cache_logger_on_first_use=True,
57    )

Configure structlog itself

def get_logger_config():
 60def get_logger_config():
 61    """Configure python stdlib's logging"""
 62    debug = CONFIG.get_bool("debug")
 63    global_level = get_log_level()
 64    base_config = {
 65        "version": 1,
 66        "disable_existing_loggers": False,
 67        "formatters": {
 68            "json": {
 69                "()": structlog.stdlib.ProcessorFormatter,
 70                "processor": structlog.processors.JSONRenderer(sort_keys=True),
 71                "foreign_pre_chain": LOG_PRE_CHAIN
 72                + [
 73                    structlog.processors.ExceptionRenderer(
 74                        structlog.tracebacks.ExceptionDictTransformer(
 75                            show_locals=CONFIG.get_bool("debug")
 76                        )
 77                    ),
 78                ],
 79            },
 80            "console": {
 81                "()": structlog.stdlib.ProcessorFormatter,
 82                "processor": structlog.dev.ConsoleRenderer(colors=debug),
 83                "foreign_pre_chain": LOG_PRE_CHAIN,
 84            },
 85        },
 86        "handlers": {
 87            "console": {
 88                "level": "DEBUG",
 89                "class": "logging.StreamHandler",
 90                "formatter": "console" if debug else "json",
 91            },
 92        },
 93        "loggers": {},
 94    }
 95
 96    handler_level_map = {
 97        "": global_level,
 98        "authentik": global_level,
 99        "django": "WARNING",
100        "django.request": "ERROR",
101        "selenium": "WARNING",
102        "docker": "WARNING",
103        "urllib3": "WARNING",
104        "websockets": "WARNING",
105        "daphne": "WARNING",
106        "kubernetes": "INFO",
107        "asyncio": "WARNING",
108        "fsevents": "WARNING",
109        "uvicorn": "WARNING",
110        "gunicorn": "INFO",
111        "requests_mock": "WARNING",
112        "hpack": "WARNING",
113        "httpx": "WARNING",
114        "azure": "WARNING",
115        "httpcore": "WARNING",
116    }
117    for handler_name, level in handler_level_map.items():
118        base_config["loggers"][handler_name] = {
119            "handlers": ["console"],
120            "level": level,
121            "propagate": False,
122        }
123    return base_config

Configure python stdlib's logging

def add_process_id(logger: logging.Logger, method_name: str, event_dict):
126def add_process_id(logger: Logger, method_name: str, event_dict):
127    """Add the current process ID"""
128    event_dict["pid"] = getpid()
129    return event_dict

Add the current process ID

def add_tenant_information(logger: logging.Logger, method_name: str, event_dict):
132def add_tenant_information(logger: Logger, method_name: str, event_dict):
133    """Add the current tenant"""
134    tenant = getattr(connection, "tenant", None)
135    schema_name = getattr(connection, "schema_name", None)
136    if tenant is not None:
137        event_dict["schema_name"] = tenant.schema_name
138        event_dict["domain_url"] = getattr(tenant, "domain_url", None)
139    elif schema_name is not None:
140        event_dict["schema_name"] = schema_name
141    return event_dict

Add the current tenant