Skip to content

Add support for kombu #1327

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 15 commits into from
Mar 27, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion newrelic/api/transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,9 @@
_logger = logging.getLogger(__name__)

DISTRIBUTED_TRACE_KEYS_REQUIRED = ("ty", "ac", "ap", "tr", "ti")
DISTRIBUTED_TRACE_TRANSPORT_TYPES = set(("HTTP", "HTTPS", "Kafka", "JMS", "IronMQ", "AMQP", "Queue", "Other"))
DISTRIBUTED_TRACE_TRANSPORT_TYPES = set(
("HTTP", "HTTPS", "Kafka", "JMS", "IronMQ", "AMQP", "Queue", "SQS", "REDIS", "ZooKeeper", "Other")
)
DELIMITER_FORMAT_RE = re.compile("[ \t]*,[ \t]*")
ACCEPTED_DISTRIBUTED_TRACE = 1
CREATED_DISTRIBUTED_TRACE = 2
Expand Down
5 changes: 4 additions & 1 deletion newrelic/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -2846,7 +2846,10 @@ def _process_module_builtin_defaults():
_process_module_definition(
    "kafka.coordinator.heartbeat", "newrelic.hooks.messagebroker_kafkapython", "instrument_kafka_heartbeat"
)

# Kombu message-broker instrumentation. The third argument must exactly match
# the hook-function name in newrelic/hooks/messagebroker_kombu.py — note that
# "instrument_kombu_serializaion" is spelled this way in the hook module.
_process_module_definition("kombu.messaging", "newrelic.hooks.messagebroker_kombu", "instrument_kombu_messaging")
_process_module_definition(
    "kombu.serialization", "newrelic.hooks.messagebroker_kombu", "instrument_kombu_serializaion"
)
_process_module_definition("logging", "newrelic.hooks.logger_logging", "instrument_logging")

_process_module_definition("loguru", "newrelic.hooks.logger_loguru", "instrument_loguru")
Expand Down
4 changes: 4 additions & 0 deletions newrelic/core/attribute.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,10 @@
"host.displayName",
"http.statusCode",
"http.url",
"kafka.consume.channel_id",
"kafka.consume.byteCount",
"kombu.consume.channel_id",
"kombu.consume.byteCount",
"llm",
"message.queueName",
"message.routingKey",
Expand Down
27 changes: 27 additions & 0 deletions newrelic/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,10 @@ def emit(self, record):
_logger.addHandler(_NullHandler())


def parse_space_separated_into_list(string):
    """Split *string* on runs of whitespace and return the tokens as a list.

    An empty or all-whitespace string yields an empty list.
    """
    tokens = string.split()
    return tokens


def _map_aws_account_id(s, logger):
# The AWS account id must be a 12 digit number.
# See https://docs.aws.amazon.com/accounts/latest/reference/manage-acct-identifiers.html#awsaccountid.
Expand Down Expand Up @@ -425,6 +429,18 @@ class InstrumentationGraphQLSettings(Settings):
pass


class InstrumentationKombuSettings(Settings):
    # Container node for settings.instrumentation.kombu.*; the
    # ignored_exchanges and consumer sub-settings are attached to it later
    # in this module.
    pass


class InstrumentationKombuIgnoredExchangesSettings(Settings):
    # Placeholder node for settings.instrumentation.kombu.ignored_exchanges.
    # NOTE(review): later in this module the attribute is reassigned to a
    # plain list parsed from NEW_RELIC_INSTRUMENTATION_KOMBU_IGNORED_EXCHANGES,
    # replacing this Settings instance entirely — confirm that is intended.
    pass


class InstrumentationKombuConsumerSettings(Settings):
    # Kombu consumer-side transactions are opt-in (disabled by default) and
    # enabled via NEW_RELIC_INSTRUMENTATION_KOMBU_CONSUMER_ENABLED.
    enabled = False


class EventHarvestConfigSettings(Settings):
nested = True
_lock = threading.Lock()
Expand Down Expand Up @@ -488,6 +504,9 @@ class EventHarvestConfigHarvestLimitSettings(Settings):
_settings.infinite_tracing = InfiniteTracingSettings()
_settings.instrumentation = InstrumentationSettings()
_settings.instrumentation.graphql = InstrumentationGraphQLSettings()
# Kombu instrumentation settings subtree:
#   instrumentation.kombu.ignored_exchanges — exchanges whose consumed
#     messages are not turned into transactions
#   instrumentation.kombu.consumer.enabled  — opt-in flag for consumer-side
#     message transactions
_settings.instrumentation.kombu = InstrumentationKombuSettings()
_settings.instrumentation.kombu.ignored_exchanges = InstrumentationKombuIgnoredExchangesSettings()
_settings.instrumentation.kombu.consumer = InstrumentationKombuConsumerSettings()
_settings.message_tracer = MessageTracerSettings()
_settings.process_host = ProcessHostSettings()
_settings.rum = RumSettings()
Expand Down Expand Up @@ -877,6 +896,14 @@ def default_otlp_host(host):
"NEW_RELIC_INSTRUMENTATION_GRAPHQL_CAPTURE_INTROSPECTION_QUERIES", False
)

# celeryev is the monitoring (event) queue for RabbitMQ, which we do not need
# to monitor — it just makes a lot of noise — so it is ignored by default.
_settings.instrumentation.kombu.ignored_exchanges = parse_space_separated_into_list(
    os.environ.get("NEW_RELIC_INSTRUMENTATION_KOMBU_IGNORED_EXCHANGES", "celeryev")
)
# Consumer-side transactions are opt-in to avoid creating duplicate
# transactions when Celery (which has its own instrumentation) drives Kombu.
_settings.instrumentation.kombu.consumer.enabled = _environ_as_bool(
    "NEW_RELIC_INSTRUMENTATION_KOMBU_CONSUMER_ENABLED", default=False
)

_settings.event_harvest_config.harvest_limits.analytic_event_data = _environ_as_int(
"NEW_RELIC_ANALYTICS_EVENTS_MAX_SAMPLES_STORED", DEFAULT_RESERVOIR_SIZE
)
Expand Down
228 changes: 228 additions & 0 deletions newrelic/hooks/messagebroker_kombu.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,228 @@
# Copyright 2010 New Relic, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys

from newrelic.api.application import application_instance, application_settings
from newrelic.api.function_trace import FunctionTrace
from newrelic.api.message_trace import MessageTrace
from newrelic.api.message_transaction import MessageTransaction
from newrelic.api.time_trace import current_trace, notice_error
from newrelic.api.transaction import current_transaction
from newrelic.common.object_wrapper import ObjectProxy, function_wrapper, wrap_function_wrapper
from newrelic.common.package_version_utils import get_package_version
from newrelic.common.signature import bind_args
from newrelic.core.config import global_settings

_logger = logging.getLogger(__name__)

"""
The following are unsupported transport types since the libraries are too old:
* librabbitmq
* qpid
* amqp uses librabbitmq or py-amqp
"""
# Map of kombu transport driver_name (lowercased) to the New Relic
# distributed-trace transport type. Values must be members of
# DISTRIBUTED_TRACE_TRANSPORT_TYPES in newrelic/api/transaction.py; unknown
# drivers fall back to "Other" at the lookup site.
AVAILABLE_TRANSPORTS = {
    "py-amqp": "AMQP",
    "sqs": "SQS",
    "redis": "REDIS",
    "zookeeper": "ZooKeeper",
    "confluentkafka": "Kafka",
}


def wrap_Producer_publish(wrapped, instance, args, kwargs):
    """Trace kombu ``Producer.publish`` and inject distributed-trace headers.

    Wraps the publish call in a MessageTrace and merges the agent's
    distributed-tracing headers into the outgoing message headers. Outside of
    a transaction the call passes through untouched.
    """
    transaction = current_transaction()

    if transaction is None:
        return wrapped(*args, **kwargs)

    bound_args = bind_args(wrapped, args, kwargs)
    # Normalize a missing/None headers argument to an empty dict.
    headers = bound_args["headers"] or {}
    # The exchange argument may be an object carrying a name attribute; an
    # empty/absent name is reported as the "Default" exchange.
    exchange = getattr(bound_args["exchange"], "name", None) or "Default"

    transaction.add_messagebroker_info("Kombu", get_package_version("kombu"))

    with MessageTrace(
        library="Kombu",
        operation="Produce",
        destination_type="Exchange",
        destination_name=exchange,
        source=wrapped,
        terminal=False,
    ):
        # NOTE(review): header values are encoded to bytes — presumably to
        # match the transport's expected header value type; confirm for
        # non-AMQP transports.
        dt_headers = {k: v.encode("utf-8") for k, v in MessageTrace.generate_request_headers(transaction)}
        # Caller-supplied headers take precedence over the generated DT headers.
        dt_headers.update(headers)

        try:
            bound_args["headers"] = dt_headers
            return wrapped(**bound_args)
        except Exception:
            notice_error()
            raise


def wrap_consumer_recieve_callback(wrapped, instance, args, kwargs):
    """Wrap kombu ``Consumer._receive_callback`` to record message consumption.

    Depending on context this either starts a new MessageTransaction for the
    delivered message or records byte/message-count metrics on an already
    active transaction. Errors raised by the callback are reported and
    re-raised.
    """
    # In cases where Kombu is being used to talk to the queue via Celery (aka Celery
    # is the toplevel api) a transaction will be created for Kombu and a separate
    # transaction will be created for Celery. If instrumentation.kombu.consumer.enabled
    # is disabled, do not create the duplicate Kombu transaction.
    settings = application_settings() or global_settings()
    if not settings.instrumentation.kombu.consumer.enabled:
        return wrapped(*args, **kwargs)

    # This will be the transaction, if any, that is created by this wrapper.
    created_transaction = None

    bound_args = bind_args(wrapped, args, kwargs)
    message = bound_args["message"]
    if message:
        # In Kombu there is no iterator; instead there is a callback that
        # is called inside wrapped.
        # This callback can be called either outside of a transaction, or
        # within the context of an existing transaction. There are 3
        # possibilities we need to handle: (Note that this is similar to
        # our Pika and Celery instrumentation)
        #
        #   1. In an inactive transaction
        #
        #      If the end_of_transaction() or ignore_transaction() API
        #      calls have been invoked, this callback may be called in the
        #      context of an inactive transaction. In this case, don't wrap
        #      the callback in any way.
        #
        #   2. In an active transaction
        #
        #      Do nothing.
        #
        #   3. Outside of a transaction
        #
        #      Since it's not running inside of an existing transaction, we
        #      want to create a new background transaction for it.
        body = getattr(message, "body", None)
        key = getattr(message, "delivery_info", {}).get("routing_key")
        library = "Kombu"
        destination_type = "Exchange"
        # An empty/absent exchange is the default exchange.
        destination_name = getattr(message, "delivery_info", {}).get("exchange") or "Default"
        received_bytes = len(str(body).encode("utf-8"))
        message_count = 1
        # active_only=False so an inactive (ended/ignored) transaction is
        # still detected and no duplicate transaction is started (case 1).
        transaction = current_transaction(active_only=False)
        if not transaction and destination_name not in settings.instrumentation.kombu.ignored_exchanges:
            # Try to get the transport type. The default for kombu is py-amqp.
            # If not in the known transport type list, fall back to "Other".
            try:
                transport_name = getattr(
                    getattr(getattr(instance, "connection", None), "transport", None), "driver_name", "py-amqp"
                )
                transport_type = AVAILABLE_TRANSPORTS.get(transport_name.lower(), "Other")
            except Exception:
                _logger.debug("Failed to determine transport type.", exc_info=True)
                transport_type = "Other"
            created_transaction = MessageTransaction(
                application=application_instance(),
                library=library,
                destination_type=destination_type,
                destination_name=destination_name,
                headers=dict(getattr(message, "headers", {})),
                transport_type=transport_type,
                routing_key=key,
                source=wrapped,
            )
            created_transaction.__enter__()  # pylint: disable=C2801
            created_transaction.destination_name = destination_name

            # Obtain consumer channel_id to send up as an agent attribute.
            if hasattr(message, "channel") and hasattr(message.channel, "channel_id"):
                channel_id = message.channel.channel_id
                created_transaction._add_agent_attribute("kombu.consume.channel_id", channel_id)
            if received_bytes:
                created_transaction._add_agent_attribute("kombu.consume.byteCount", received_bytes)

        transaction = current_transaction()
        if transaction:  # If there is an active transaction now.
            # Add metrics whether or not a transaction was already active, or one was just started.
            # Don't add metrics if there was an inactive transaction.
            # Name the metrics using the same format as the transaction, but in case the active transaction
            # was an existing one and not a message transaction, reproduce the naming logic here.
            group = f"Message/{library}/{destination_type}"
            name = f"Named/{destination_name}"
            if received_bytes:
                transaction.record_custom_metric(f"{group}/{name}/Received/Bytes", received_bytes)
            if message_count:
                transaction.record_custom_metric(f"{group}/{name}/Received/Messages", message_count)
            transaction.add_messagebroker_info("Kombu", get_package_version("kombu"))

    try:
        return_val = wrapped(*args, **kwargs)
    except Exception:
        if created_transaction:
            # End the transaction we started, recording the exception.
            created_transaction.__exit__(*sys.exc_info())
        elif current_transaction():
            # Report error on existing transaction if there is one.
            notice_error()
        else:
            # Report error on the application.
            notice_error(application=application_instance(activate=False))
        raise

    if created_transaction and not created_transaction.stopped:
        created_transaction.__exit__(*sys.exc_info())

    return return_val


def wrap_serialize(wrapped, instance, args, kwargs):
    """Record a FunctionTrace around kombu (de)serialization calls.

    The trace is named after the exchange of the surrounding
    MessageTransaction, or of the nearest enclosing MessageTrace when
    publishing inside another transaction; otherwise "Unknown" is used.
    Outside of a transaction the call passes through untouched.
    """
    transaction = current_transaction()
    if not transaction:
        return wrapped(*args, **kwargs)

    exchange = "Unknown"
    if isinstance(transaction, MessageTransaction):
        exchange = transaction.destination_name
    else:
        # Walk up the trace tree to find the parent MessageTrace, which
        # carries the destination (exchange) name.
        message_trace = current_trace()
        while message_trace is not None and not isinstance(message_trace, MessageTrace):
            message_trace = message_trace.parent
        if message_trace:
            exchange = message_trace.destination_name

    # Plain string: there is nothing to interpolate in the group name.
    group = "MessageBroker/Kombu/Exchange"
    name = f"Named/{exchange}/Serialization/Value"

    with FunctionTrace(name=name, group=group):
        return wrapped(*args, **kwargs)


def instrument_kombu_messaging(module):
    """Instrument kombu.messaging: producer publish, consumer delivery, dumps."""
    targets = (
        ("Producer", "Producer.publish", wrap_Producer_publish),
        ("Consumer", "Consumer._receive_callback", wrap_consumer_recieve_callback),
        # This is a little unorthodox, but because Kombu creates an object on
        # import we have to instrument dumps where it's used/imported as
        # opposed to where the function is defined.
        ("dumps", "dumps", wrap_serialize),
    )
    for attribute, target, wrapper in targets:
        if hasattr(module, attribute):
            wrap_function_wrapper(module, target, wrapper)


def instrument_kombu_serializaion(module):
    """Instrument kombu.serialization by wrapping ``loads`` at its import site.

    This is a little unorthodox, but because Kombu creates an object on import
    we have to instrument it where it's used/imported as opposed to where the
    function is defined. (The misspelled name is the exact hook name
    registered in newrelic/config.py, so it must not be renamed.)
    """
    if not hasattr(module, "loads"):
        return
    wrap_function_wrapper(module, "loads", wrap_serialize)
Loading
Loading