Merged
4 changes: 1 addition & 3 deletions src/components/controller/controller.py
@@ -1,6 +1,5 @@
import asyncio
import logging
import traceback
from datetime import datetime
from typing import Any, Coroutine

@@ -74,8 +73,7 @@ async def _queue_task(monitor: Monitor, tasks: list[str]) -> None:
except Exception:
prometheus_task_queue_error_count.inc()

_logger.error("Error while queueing the task, reverting queued state")
_logger.error(traceback.format_exc().strip())
_logger.error("Error while queueing the task, reverting queued state", exc_info=True)
await monitor.set_queued(False)


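The pattern applied throughout this PR replaces the two-call `traceback.format_exc()` idiom with logging's built-in `exc_info=True`, which attaches the active exception to the log record and lets the handler's formatter render the traceback. A minimal standalone sketch (not taken from this repository) showing the two idioms side by side:

```python
import logging
import traceback

logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s]: %(message)s")
logger = logging.getLogger("demo")

try:
    1 / 0
except Exception:
    # Old idiom: two records, traceback rendered by hand.
    logger.error("Error while doing the thing")
    logger.error(traceback.format_exc().strip())

    # New idiom: one record; the formatter appends the traceback itself.
    logger.error("Error while doing the thing", exc_info=True)
```

Besides dropping the `traceback` import, this keeps the message and its traceback in a single record instead of two loosely coupled ones.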
5 changes: 2 additions & 3 deletions src/components/executor/event_handler.py
@@ -1,7 +1,6 @@
import asyncio
import json
import logging
import traceback
from functools import partial
from typing import Any

@@ -91,6 +90,6 @@ async def run(message: dict[Any, Any]) -> None:
prometheus_reaction_error_count.labels(**prometheus_labels).inc()
_logger.error(
f"Error executing reaction '{reaction_name}' with payload "
f"'{json.dumps(event_payload.to_dict())}'"
f"'{json.dumps(event_payload.to_dict())}'",
exc_info=True,
)
_logger.error(traceback.format_exc().strip())
4 changes: 1 addition & 3 deletions src/components/executor/monitor_handler.py
@@ -1,7 +1,6 @@
import asyncio
import json
import logging
import traceback
from datetime import datetime
from typing import Any, Literal, cast

@@ -385,8 +384,7 @@ async def run(message: dict[Any, Any]) -> None:
monitor_error_count = prometheus_monitor_error_count.labels(**prometheus_labels)
monitor_error_count.inc()

_logger.error(f"Error in execution for monitor '{monitor}'")
_logger.error(traceback.format_exc().strip())
_logger.error(f"Error in execution for monitor '{monitor}'", exc_info=True)
_logger.info("Exception caught successfully, going on")
finally:
# Cancel the heartbeat task
6 changes: 3 additions & 3 deletions src/components/executor/request_handler.py
@@ -1,7 +1,6 @@
import asyncio
import json
import logging
import traceback
from typing import Any, Callable, Coroutine, cast

import prometheus_client
@@ -130,5 +129,6 @@ async def run(message: dict[Any, Any]) -> None:
raise e
except Exception:
prometheus_request_error_count.labels(action_name=action_name).inc()
_logger.error(f"Error executing request '{json.dumps(message_payload.to_dict())}'")
_logger.error(traceback.format_exc().strip())
_logger.error(
f"Error executing request '{json.dumps(message_payload.to_dict())}'", exc_info=True
)
3 changes: 1 addition & 2 deletions src/components/executor/runner.py
@@ -1,7 +1,6 @@
import asyncio
import json
import logging
import traceback
from typing import Any, Callable, Coroutine

import prometheus_client
@@ -92,7 +91,7 @@ async def process_message(
except Exception:
prometheus_message_error_count.labels(message_type=message_type).inc()

self._logger.error(traceback.format_exc().strip())
self._logger.error("Error processing message", exc_info=True)
self._logger.error(f"Message: '{json.dumps(self.message.content)}'")
self._logger.info("Exception caught successfully, going on")
finally:
5 changes: 3 additions & 2 deletions src/databases/databases.py
@@ -55,8 +55,9 @@ async def init() -> None:
await pool.init()
_pools[database_name] = pool
except Exception:
_logger.error(traceback.format_exc().strip())
_logger.info(f"Skipping pool for '{env_var_name}'")
_logger.error(
f"Error initializing pool for database '{env_var_name}', skipping", exc_info=True
)


async def _fetch(
4 changes: 1 addition & 3 deletions src/plugins/plugins_loader.py
@@ -1,7 +1,6 @@
import importlib
import logging
import os
import traceback
from pathlib import Path
from types import ModuleType

@@ -39,7 +38,6 @@ def load_plugins(path: str | None = None) -> dict[str, ModuleType]:
plugin = importlib.import_module(f"{plugins_import_path}.{plugin_name}")
plugins[plugin_name] = plugin
except Exception:
_logger.error(f"Error loading plugin '{plugin_name}'")
_logger.error(traceback.format_exc().strip())
_logger.error(f"Error loading plugin '{plugin_name}'", exc_info=True)

return plugins
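For context, the surrounding loader (mostly elided by the diff) imports each plugin package dynamically and skips the ones that fail. A self-contained sketch of that pattern; the discovery details (directory iteration, underscore filtering, the `load_plugins_from` name) are assumptions, not taken from the file:

```python
import importlib
import logging
from pathlib import Path
from types import ModuleType

_logger = logging.getLogger("plugins")


def load_plugins_from(directory: Path, import_path: str) -> dict[str, ModuleType]:
    """Import every package under 'directory' as '<import_path>.<name>'."""
    plugins: dict[str, ModuleType] = {}
    for entry in sorted(directory.iterdir()):
        # Skip non-packages such as __pycache__ (assumed filtering rule).
        if not entry.is_dir() or entry.name.startswith("_"):
            continue
        try:
            plugins[entry.name] = importlib.import_module(f"{import_path}.{entry.name}")
        except Exception:
            # A broken plugin is logged with its traceback and skipped,
            # instead of aborting the whole startup.
            _logger.error(f"Error loading plugin '{entry.name}'", exc_info=True)
    return plugins
```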
6 changes: 2 additions & 4 deletions src/utils/exception_handling.py
@@ -1,6 +1,5 @@
import asyncio
import logging
import traceback
from contextlib import contextmanager
from typing import Coroutine, Generator

@@ -25,7 +24,7 @@ def catch_exceptions(
except BaseSentinelaException as e:
logger.error(str(e))
except Exception:
logger.error(traceback.format_exc().strip())
logger.error("Got an error", exc_info=True)
if error_message:
logger.error(error_message)
logger.info("Exception caught successfully, going on")
@@ -35,5 +34,4 @@ async def protected_task(logger: logging.Logger, task: Coroutine[None, None, Non
try:
await task
except Exception:
logger.error(f"Exception with task '{task}'")
logger.error(traceback.format_exc().strip())
logger.error(f"Exception with task '{task}'", exc_info=True)
27 changes: 16 additions & 11 deletions src/utils/log.py
@@ -11,31 +11,36 @@ class FriendlyFormatter(logging.Formatter):
BOLD_RED = "\x1b[31;1m"
RESET_COLOR = "\x1b[0m"

FORMATS: dict[int, str]
COLOR_FORMAT = {
logging.DEBUG: (GREY, RESET_COLOR),
logging.INFO: (GREY, RESET_COLOR),
logging.WARNING: (YELLOW, RESET_COLOR),
logging.ERROR: (RED, RESET_COLOR),
logging.CRITICAL: (BOLD_RED, RESET_COLOR),
}

_log_format: str

def __init__(self, log_format: str | None = None) -> None:
"""Initialize the log formatter."""
if log_format is None:
log_format = "%(asctime)s [%(levelname)s]: %(message)s"

self.FORMATS = {
logging.DEBUG: self.GREY + log_format + self.RESET_COLOR,
logging.INFO: self.GREY + log_format + self.RESET_COLOR,
logging.WARNING: self.YELLOW + log_format + self.RESET_COLOR,
logging.ERROR: self.RED + log_format + self.RESET_COLOR,
logging.CRITICAL: self.BOLD_RED + log_format + self.RESET_COLOR,
}
self._log_format = log_format

def format(self, record: logging.LogRecord) -> str:
"""Format the log record in a friendly way"""
log_format = self.FORMATS.get(record.levelno)
formatter = logging.Formatter(log_format)
return formatter.format(record)
prefix, suffix = self.COLOR_FORMAT.get(record.levelno, ("", ""))

formatter = logging.Formatter(self._log_format)
return prefix + formatter.format(record) + suffix


class JsonFormatter(logging.Formatter):
fields: dict[str, str]

def __init__(self, fields: dict[str, str] | None = None) -> None:
"""Initialize the JSON log formatter."""
if fields is None:
fields = {"message": "message"}

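The `FriendlyFormatter` change moves the ANSI codes out of the per-level format strings and wraps the fully rendered record instead, which (presumably) is what lets the traceback text appended by `exc_info=True` pick up the same color as the message. A condensed sketch of the approach; the color values for GREY/YELLOW/RED are assumed, since only BOLD_RED and RESET_COLOR are visible in the hunk:

```python
import logging


class ColorFormatter(logging.Formatter):
    GREY = "\x1b[38;20m"       # assumed value
    YELLOW = "\x1b[33;20m"     # assumed value
    RED = "\x1b[31;20m"        # assumed value
    BOLD_RED = "\x1b[31;1m"
    RESET_COLOR = "\x1b[0m"

    # (prefix, suffix) per level; unknown levels fall back to no color.
    COLOR_FORMAT = {
        logging.DEBUG: (GREY, RESET_COLOR),
        logging.INFO: (GREY, RESET_COLOR),
        logging.WARNING: (YELLOW, RESET_COLOR),
        logging.ERROR: (RED, RESET_COLOR),
        logging.CRITICAL: (BOLD_RED, RESET_COLOR),
    }

    def format(self, record: logging.LogRecord) -> str:
        prefix, suffix = self.COLOR_FORMAT.get(record.levelno, ("", ""))
        # Wrap the fully rendered record, traceback included, in the color codes.
        return prefix + super().format(record) + suffix


handler = logging.StreamHandler()
handler.setFormatter(ColorFormatter("%(asctime)s [%(levelname)s]: %(message)s"))
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(logging.INFO)
```

`super().format()` stands in for the per-call `logging.Formatter(self._log_format)` used in the diff; either way the exception text added by `exc_info=True` is rendered before the reset code, so it is colored along with the message.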
2 changes: 1 addition & 1 deletion tests/databases/test_databases.py
@@ -92,7 +92,7 @@ async def init_mock(self):

assert databases._pools.keys() == {"application"}

assert_message_in_log(caplog, "Skipping pool for 'DATABASE_LOCAL'")
assert_message_in_log(caplog, "Error initializing pool for database 'DATABASE_LOCAL', skipping")
assert_message_in_log(caplog, "ValueError: some error")


12 changes: 8 additions & 4 deletions tests/test_utils.py
@@ -6,10 +6,12 @@ def assert_message_in_log(caplog, pattern: str, count: int = 1, regex: bool = Fa
found = 0
for record in caplog.records:
if regex:
if re.search(pattern, record.message):
message_match = re.search(pattern, record.message)
exception_match = record.exc_text and re.search(pattern, record.exc_text)
if message_match or exception_match:
found += 1
else:
if pattern in record.message:
if pattern in record.message or (record.exc_text and pattern in record.exc_text):
found += 1
if found != count:
assert False, f"Expected {count} log messages '{pattern}', found {found}"
@@ -19,8 +21,10 @@ def assert_message_not_in_log(caplog, pattern: str, regex: bool = False):
"""Check if the logs do not contain the specified message"""
for record in caplog.records:
if regex:
if re.search(pattern, record.message):
message_match = re.search(pattern, record.message)
exception_match = record.exc_text and re.search(pattern, record.exc_text)
if message_match or exception_match:
assert False, f"Found log message: {pattern}"
else:
if pattern in record.message:
if pattern in record.message or (record.exc_text and pattern in record.exc_text):
assert False, f"Found log message: {pattern}"
3 changes: 0 additions & 3 deletions tests/utils/test_exception_handling.py
@@ -107,7 +107,6 @@ async def test_protected_task_error(caplog, mocker):
async def test_protected_task_error(caplog, mocker):
"""'protected_task' should log the exception message if an exception is raised"""
logger = logging.getLogger("test_protected_task")
logger_error_spy: MagicMock = mocker.spy(logger, "error")

async def error() -> None:
raise ValueError("should be raised")
@@ -116,5 +115,3 @@ async def error() -> None:

assert_message_in_log(caplog, "Exception with task")
assert_message_in_log(caplog, "ValueError: should be raised")

assert logger_error_spy.call_count == 2