From 65d62b631a73795e70d1cd82fc32a61e69176306 Mon Sep 17 00:00:00 2001 From: fagnerzulin Date: Thu, 20 Feb 2025 11:33:25 -0300 Subject: [PATCH] Adds async Elasticsearch support --- newrelic/config.py | 1357 +++++------------ newrelic/hooks/datastore_elasticsearch.py | 242 ++- tests/datastore_elasticsearch/conftest.py | 7 + .../test_async_connection.py | 63 + .../test_async_database_duration.py | 71 + .../test_async_elasticsearch.py | 238 +++ .../test_async_instrumented_methods.py | 131 ++ .../test_async_mget.py | 143 ++ .../test_async_multiple_dbs.py | 110 ++ .../test_async_trace_node.py | 92 ++ .../test_async_transport.py | 84 + tox.ini | 1 + 12 files changed, 1539 insertions(+), 1000 deletions(-) create mode 100644 tests/datastore_elasticsearch/test_async_connection.py create mode 100644 tests/datastore_elasticsearch/test_async_database_duration.py create mode 100644 tests/datastore_elasticsearch/test_async_elasticsearch.py create mode 100644 tests/datastore_elasticsearch/test_async_instrumented_methods.py create mode 100644 tests/datastore_elasticsearch/test_async_mget.py create mode 100644 tests/datastore_elasticsearch/test_async_multiple_dbs.py create mode 100644 tests/datastore_elasticsearch/test_async_trace_node.py create mode 100644 tests/datastore_elasticsearch/test_async_transport.py diff --git a/newrelic/config.py b/newrelic/config.py index 952a41b70..6886ca37e 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -47,13 +47,12 @@ agent_control_health_instance, agent_control_healthcheck_loop, ) -from newrelic.core.config import ( - Settings, - apply_config_setting, - default_host, - fetch_config_setting, +from newrelic.core.config import Settings, apply_config_setting, default_host, fetch_config_setting +from newrelic.core.agent_control_health import ( + HealthStatus, + agent_control_health_instance, + agent_control_healthcheck_loop, ) -from newrelic.core.agent_control_health import HealthStatus, agent_control_health_instance, 
agent_control_healthcheck_loop __all__ = ["initialize", "filter_app_factory"] @@ -75,11 +74,7 @@ def _map_aws_account_id(s): # This will be used to validate what is provided and issue warnings # if feature flags not in set are provided. -_FEATURE_FLAGS = set( - [ - "django.instrumentation.inclusion-tags.r1", - ] -) +_FEATURE_FLAGS = set(["django.instrumentation.inclusion-tags.r1"]) # Names of configuration file and deployment environment. This # will be overridden by the load_configuration() function when @@ -365,12 +360,7 @@ def _process_configuration(section): _process_setting(section, "memory_runtime_pid_metrics.enabled", "getboolean", None) _process_setting(section, "thread_profiler.enabled", "getboolean", None) _process_setting(section, "transaction_tracer.enabled", "getboolean", None) - _process_setting( - section, - "transaction_tracer.transaction_threshold", - "get", - _map_transaction_threshold, - ) + _process_setting(section, "transaction_tracer.transaction_threshold", "get", _map_transaction_threshold) _process_setting(section, "transaction_tracer.record_sql", "get", _map_record_sql) _process_setting(section, "transaction_tracer.stack_trace_threshold", "getfloat", None) _process_setting(section, "transaction_tracer.explain_enabled", "getboolean", None) @@ -379,37 +369,17 @@ def _process_configuration(section): _process_setting(section, "transaction_tracer.generator_trace", "get", _map_split_strings) _process_setting(section, "transaction_tracer.top_n", "getint", None) _process_setting(section, "transaction_tracer.attributes.enabled", "getboolean", None) - _process_setting( - section, - "transaction_tracer.attributes.exclude", - "get", - _map_inc_excl_attributes, - ) - _process_setting( - section, - "transaction_tracer.attributes.include", - "get", - _map_inc_excl_attributes, - ) + _process_setting(section, "transaction_tracer.attributes.exclude", "get", _map_inc_excl_attributes) + _process_setting(section, "transaction_tracer.attributes.include", "get", 
_map_inc_excl_attributes) _process_setting(section, "error_collector.enabled", "getboolean", None) _process_setting(section, "error_collector.capture_events", "getboolean", None) _process_setting(section, "error_collector.max_event_samples_stored", "getint", None) _process_setting(section, "error_collector.capture_source", "getboolean", None) _process_setting(section, "error_collector.ignore_errors", "get", _map_split_strings) _process_setting(section, "error_collector.ignore_classes", "get", _map_split_strings) - _process_setting( - section, - "error_collector.ignore_status_codes", - "get", - _merge_ignore_status_codes, - ) + _process_setting(section, "error_collector.ignore_status_codes", "get", _merge_ignore_status_codes) _process_setting(section, "error_collector.expected_classes", "get", _map_split_strings) - _process_setting( - section, - "error_collector.expected_status_codes", - "get", - _merge_expected_status_codes, - ) + _process_setting(section, "error_collector.expected_status_codes", "get", _merge_expected_status_codes) _process_setting(section, "error_collector.attributes.enabled", "getboolean", None) _process_setting(section, "error_collector.attributes.exclude", "get", _map_inc_excl_attributes) _process_setting(section, "error_collector.attributes.include", "get", _map_inc_excl_attributes) @@ -420,35 +390,15 @@ def _process_configuration(section): _process_setting(section, "browser_monitoring.ssl_for_http", "getboolean", None) _process_setting(section, "browser_monitoring.content_type", "get", _map_split_strings) _process_setting(section, "browser_monitoring.attributes.enabled", "getboolean", None) - _process_setting( - section, - "browser_monitoring.attributes.exclude", - "get", - _map_inc_excl_attributes, - ) - _process_setting( - section, - "browser_monitoring.attributes.include", - "get", - _map_inc_excl_attributes, - ) + _process_setting(section, "browser_monitoring.attributes.exclude", "get", _map_inc_excl_attributes) + 
_process_setting(section, "browser_monitoring.attributes.include", "get", _map_inc_excl_attributes) _process_setting(section, "slow_sql.enabled", "getboolean", None) _process_setting(section, "synthetics.enabled", "getboolean", None) _process_setting(section, "transaction_events.enabled", "getboolean", None) _process_setting(section, "transaction_events.max_samples_stored", "getint", None) _process_setting(section, "transaction_events.attributes.enabled", "getboolean", None) - _process_setting( - section, - "transaction_events.attributes.exclude", - "get", - _map_inc_excl_attributes, - ) - _process_setting( - section, - "transaction_events.attributes.include", - "get", - _map_inc_excl_attributes, - ) + _process_setting(section, "transaction_events.attributes.exclude", "get", _map_inc_excl_attributes) + _process_setting(section, "transaction_events.attributes.include", "get", _map_inc_excl_attributes) _process_setting(section, "custom_insights_events.enabled", "getboolean", None) _process_setting(section, "custom_insights_events.max_samples_stored", "getint", None) _process_setting(section, "custom_insights_events.max_attribute_value", "getint", None) @@ -461,18 +411,8 @@ def _process_configuration(section): _process_setting(section, "span_events.attributes.exclude", "get", _map_inc_excl_attributes) _process_setting(section, "span_events.attributes.include", "get", _map_inc_excl_attributes) _process_setting(section, "transaction_segments.attributes.enabled", "getboolean", None) - _process_setting( - section, - "transaction_segments.attributes.exclude", - "get", - _map_inc_excl_attributes, - ) - _process_setting( - section, - "transaction_segments.attributes.include", - "get", - _map_inc_excl_attributes, - ) + _process_setting(section, "transaction_segments.attributes.exclude", "get", _map_inc_excl_attributes) + _process_setting(section, "transaction_segments.attributes.include", "get", _map_inc_excl_attributes) _process_setting(section, "local_daemon.socket_path", 
"get", None) _process_setting(section, "local_daemon.synchronous_startup", "getboolean", None) _process_setting(section, "agent_limits.transaction_traces_nodes", "getint", None) @@ -526,12 +466,7 @@ def _process_configuration(section): _process_setting(section, "utilization.total_ram_mib", "getint", None) _process_setting(section, "utilization.billing_hostname", "get", None) _process_setting(section, "strip_exception_messages.enabled", "getboolean", None) - _process_setting( - section, - "strip_exception_messages.allowlist", - "get", - _map_strip_exception_messages_allowlist, - ) + _process_setting(section, "strip_exception_messages.allowlist", "get", _map_strip_exception_messages_allowlist) _process_setting(section, "datastore_tracer.instance_reporting.enabled", "getboolean", None) _process_setting(section, "datastore_tracer.database_name_reporting.enabled", "getboolean", None) _process_setting(section, "heroku.use_dyno_names", "getboolean", None) @@ -540,12 +475,7 @@ def _process_configuration(section): _process_setting(section, "apdex_t", "getfloat", None) _process_setting(section, "event_loop_visibility.enabled", "getboolean", None) _process_setting(section, "event_loop_visibility.blocking_threshold", "getfloat", None) - _process_setting( - section, - "event_harvest_config.harvest_limits.analytic_event_data", - "getint", - None, - ) + _process_setting(section, "event_harvest_config.harvest_limits.analytic_event_data", "getint", None) _process_setting(section, "event_harvest_config.harvest_limits.custom_event_data", "getint", None) _process_setting(section, "event_harvest_config.harvest_limits.ml_event_data", "getint", None) _process_setting(section, "event_harvest_config.harvest_limits.span_event_data", "getint", None) @@ -730,68 +660,27 @@ def translate_deprecated_settings(settings, cached_settings): cached = dict(cached_settings) deprecated_settings_map = [ - ( - "transaction_tracer.capture_attributes", - "transaction_tracer.attributes.enabled", - ), + 
("transaction_tracer.capture_attributes", "transaction_tracer.attributes.enabled"), ("error_collector.capture_attributes", "error_collector.attributes.enabled"), - ( - "browser_monitoring.capture_attributes", - "browser_monitoring.attributes.enabled", - ), - ( - "analytics_events.capture_attributes", - "transaction_events.attributes.enabled", - ), + ("browser_monitoring.capture_attributes", "browser_monitoring.attributes.enabled"), + ("analytics_events.capture_attributes", "transaction_events.attributes.enabled"), ("analytics_events.enabled", "transaction_events.enabled"), - ( - "analytics_events.max_samples_stored", - "event_harvest_config.harvest_limits.analytic_event_data", - ), - ( - "transaction_events.max_samples_stored", - "event_harvest_config.harvest_limits.analytic_event_data", - ), - ( - "span_events.max_samples_stored", - "event_harvest_config.harvest_limits.span_event_data", - ), - ( - "error_collector.max_event_samples_stored", - "event_harvest_config.harvest_limits.error_event_data", - ), - ( - "custom_insights_events.max_samples_stored", - "event_harvest_config.harvest_limits.custom_event_data", - ), - ( - "application_logging.forwarding.max_samples_stored", - "event_harvest_config.harvest_limits.log_event_data", - ), - ( - "error_collector.ignore_errors", - "error_collector.ignore_classes", - ), - ( - "strip_exception_messages.whitelist", - "strip_exception_messages.allowlist", - ), + ("analytics_events.max_samples_stored", "event_harvest_config.harvest_limits.analytic_event_data"), + ("transaction_events.max_samples_stored", "event_harvest_config.harvest_limits.analytic_event_data"), + ("span_events.max_samples_stored", "event_harvest_config.harvest_limits.span_event_data"), + ("error_collector.max_event_samples_stored", "event_harvest_config.harvest_limits.error_event_data"), + ("custom_insights_events.max_samples_stored", "event_harvest_config.harvest_limits.custom_event_data"), + ("application_logging.forwarding.max_samples_stored", 
"event_harvest_config.harvest_limits.log_event_data"), + ("error_collector.ignore_errors", "error_collector.ignore_classes"), + ("strip_exception_messages.whitelist", "strip_exception_messages.allowlist"), ] for old_key, new_key in deprecated_settings_map: if old_key in cached: - _logger.info( - "Deprecated setting found: %r. Please use new setting: %r.", - old_key, - new_key, - ) + _logger.info("Deprecated setting found: %r. Please use new setting: %r.", old_key, new_key) if new_key in cached: - _logger.info( - "Ignoring deprecated setting: %r. Using new setting: %r.", - old_key, - new_key, - ) + _logger.info("Ignoring deprecated setting: %r. Using new setting: %r.", old_key, new_key) else: apply_config_setting(settings, new_key, cached[old_key]) _logger.info("Applying value of deprecated setting %r to %r.", old_key, new_key) @@ -828,8 +717,7 @@ def translate_deprecated_settings(settings, cached_settings): if attr_value not in excluded_attrs: settings.attributes.exclude.append(attr_value) _logger.info( - "Applying value of deprecated setting ignored_params to attributes.exclude: %r.", - attr_value, + "Applying value of deprecated setting ignored_params to attributes.exclude: %r.", attr_value ) delete_setting(settings, "ignored_params") @@ -961,13 +849,7 @@ def _toml_config_to_configparser_dict(d, top=None, _path=None): return top -def _load_configuration( - config_file=None, - environment=None, - ignore_errors=True, - log_file=None, - log_level=None, -): +def _load_configuration(config_file=None, environment=None, ignore_errors=True, log_file=None, log_level=None): global _configuration_done global _config_file @@ -1948,15 +1830,7 @@ def _startup_data_source(): agent_instance = newrelic.core.agent.agent_instance() - for ( - section, - module, - object_path, - application, - name, - settings, - properties, - ) in _data_sources: + for section, module, object_path, application, name, settings, properties in _data_sources: try: source = 
getattr(newrelic.api.import_hook.import_module(module), object_path) @@ -2107,9 +1981,7 @@ def _process_trace_cache_import_hooks(): def _process_module_builtin_defaults(): _process_module_definition( - "openai.api_resources.embedding", - "newrelic.hooks.mlmodel_openai", - "instrument_openai_api_resources_embedding", + "openai.api_resources.embedding", "newrelic.hooks.mlmodel_openai", "instrument_openai_api_resources_embedding" ) _process_module_definition( "openai.api_resources.chat_completion", @@ -2117,25 +1989,15 @@ def _process_module_builtin_defaults(): "instrument_openai_api_resources_chat_completion", ) _process_module_definition( - "openai.resources.embeddings", - "newrelic.hooks.mlmodel_openai", - "instrument_openai_resources_embeddings", - ) - _process_module_definition( - "openai.util", - "newrelic.hooks.mlmodel_openai", - "instrument_openai_util", + "openai.resources.embeddings", "newrelic.hooks.mlmodel_openai", "instrument_openai_resources_embeddings" ) + _process_module_definition("openai.util", "newrelic.hooks.mlmodel_openai", "instrument_openai_util") _process_module_definition( "openai.api_resources.abstract.engine_api_resource", "newrelic.hooks.mlmodel_openai", "instrument_openai_api_resources_abstract_engine_api_resource", ) - _process_module_definition( - "openai._streaming", - "newrelic.hooks.mlmodel_openai", - "instrument_openai__streaming", - ) + _process_module_definition("openai._streaming", "newrelic.hooks.mlmodel_openai", "instrument_openai__streaming") _process_module_definition( "openai.resources.chat.completions", @@ -2144,20 +2006,12 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "openai.resources.completions", - "newrelic.hooks.mlmodel_openai", - "instrument_openai_resources_chat_completions", - ) - _process_module_definition( - "openai._base_client", - "newrelic.hooks.mlmodel_openai", - "instrument_openai_base_client", + "openai.resources.completions", "newrelic.hooks.mlmodel_openai", 
"instrument_openai_resources_chat_completions" ) + _process_module_definition("openai._base_client", "newrelic.hooks.mlmodel_openai", "instrument_openai_base_client") _process_module_definition( - "asyncio.base_events", - "newrelic.hooks.coroutines_asyncio", - "instrument_asyncio_base_events", + "asyncio.base_events", "newrelic.hooks.coroutines_asyncio", "instrument_asyncio_base_events" ) _process_module_definition( @@ -2171,14 +2025,10 @@ def _process_module_builtin_defaults(): "instrument_langchain_core_runnables_config", ) _process_module_definition( - "langchain.chains.base", - "newrelic.hooks.mlmodel_langchain", - "instrument_langchain_chains_base", + "langchain.chains.base", "newrelic.hooks.mlmodel_langchain", "instrument_langchain_chains_base" ) _process_module_definition( - "langchain_core.callbacks.manager", - "newrelic.hooks.mlmodel_langchain", - "instrument_langchain_callbacks_manager", + "langchain_core.callbacks.manager", "newrelic.hooks.mlmodel_langchain", "instrument_langchain_callbacks_manager" ) # VectorStores with similarity_search method @@ -2741,59 +2591,35 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "langchain_core.tools", - "newrelic.hooks.mlmodel_langchain", - "instrument_langchain_core_tools", + "langchain_core.tools", "newrelic.hooks.mlmodel_langchain", "instrument_langchain_core_tools" ) _process_module_definition( - "langchain_core.callbacks.manager", - "newrelic.hooks.mlmodel_langchain", - "instrument_langchain_callbacks_manager", + "langchain_core.callbacks.manager", "newrelic.hooks.mlmodel_langchain", "instrument_langchain_callbacks_manager" ) - _process_module_definition( - "asyncio.events", - "newrelic.hooks.coroutines_asyncio", - "instrument_asyncio_events", - ) + _process_module_definition("asyncio.events", "newrelic.hooks.coroutines_asyncio", "instrument_asyncio_events") _process_module_definition("asgiref.sync", "newrelic.hooks.adapter_asgiref", "instrument_asgiref_sync") _process_module_definition( 
- "django.core.handlers.base", - "newrelic.hooks.framework_django", - "instrument_django_core_handlers_base", - ) - _process_module_definition( - "django.core.handlers.asgi", - "newrelic.hooks.framework_django", - "instrument_django_core_handlers_asgi", + "django.core.handlers.base", "newrelic.hooks.framework_django", "instrument_django_core_handlers_base" ) _process_module_definition( - "django.core.handlers.wsgi", - "newrelic.hooks.framework_django", - "instrument_django_core_handlers_wsgi", + "django.core.handlers.asgi", "newrelic.hooks.framework_django", "instrument_django_core_handlers_asgi" ) _process_module_definition( - "django.core.urlresolvers", - "newrelic.hooks.framework_django", - "instrument_django_core_urlresolvers", + "django.core.handlers.wsgi", "newrelic.hooks.framework_django", "instrument_django_core_handlers_wsgi" ) _process_module_definition( - "django.template", - "newrelic.hooks.framework_django", - "instrument_django_template", + "django.core.urlresolvers", "newrelic.hooks.framework_django", "instrument_django_core_urlresolvers" ) + _process_module_definition("django.template", "newrelic.hooks.framework_django", "instrument_django_template") _process_module_definition( - "django.template.loader_tags", - "newrelic.hooks.framework_django", - "instrument_django_template_loader_tags", + "django.template.loader_tags", "newrelic.hooks.framework_django", "instrument_django_template_loader_tags" ) _process_module_definition( - "django.core.servers.basehttp", - "newrelic.hooks.framework_django", - "instrument_django_core_servers_basehttp", + "django.core.servers.basehttp", "newrelic.hooks.framework_django", "instrument_django_core_servers_basehttp" ) _process_module_definition( "django.contrib.staticfiles.views", @@ -2805,114 +2631,58 @@ def _process_module_builtin_defaults(): "newrelic.hooks.framework_django", "instrument_django_contrib_staticfiles_handlers", ) + _process_module_definition("django.views.debug", "newrelic.hooks.framework_django", 
"instrument_django_views_debug") _process_module_definition( - "django.views.debug", - "newrelic.hooks.framework_django", - "instrument_django_views_debug", + "django.http.multipartparser", "newrelic.hooks.framework_django", "instrument_django_http_multipartparser" ) + _process_module_definition("django.core.mail", "newrelic.hooks.framework_django", "instrument_django_core_mail") _process_module_definition( - "django.http.multipartparser", - "newrelic.hooks.framework_django", - "instrument_django_http_multipartparser", + "django.core.mail.message", "newrelic.hooks.framework_django", "instrument_django_core_mail_message" ) _process_module_definition( - "django.core.mail", - "newrelic.hooks.framework_django", - "instrument_django_core_mail", - ) - _process_module_definition( - "django.core.mail.message", - "newrelic.hooks.framework_django", - "instrument_django_core_mail_message", - ) - _process_module_definition( - "django.views.generic.base", - "newrelic.hooks.framework_django", - "instrument_django_views_generic_base", + "django.views.generic.base", "newrelic.hooks.framework_django", "instrument_django_views_generic_base" ) _process_module_definition( - "django.core.management.base", - "newrelic.hooks.framework_django", - "instrument_django_core_management_base", + "django.core.management.base", "newrelic.hooks.framework_django", "instrument_django_core_management_base" ) _process_module_definition( - "django.template.base", - "newrelic.hooks.framework_django", - "instrument_django_template_base", + "django.template.base", "newrelic.hooks.framework_django", "instrument_django_template_base" ) _process_module_definition( - "django.middleware.gzip", - "newrelic.hooks.framework_django", - "instrument_django_gzip_middleware", + "django.middleware.gzip", "newrelic.hooks.framework_django", "instrument_django_gzip_middleware" ) # New modules in Django 1.10 _process_module_definition( - "django.urls.resolvers", - "newrelic.hooks.framework_django", - 
"instrument_django_core_urlresolvers", + "django.urls.resolvers", "newrelic.hooks.framework_django", "instrument_django_core_urlresolvers" ) + _process_module_definition("django.urls.base", "newrelic.hooks.framework_django", "instrument_django_urls_base") _process_module_definition( - "django.urls.base", - "newrelic.hooks.framework_django", - "instrument_django_urls_base", - ) - _process_module_definition( - "django.core.handlers.exception", - "newrelic.hooks.framework_django", - "instrument_django_core_handlers_exception", + "django.core.handlers.exception", "newrelic.hooks.framework_django", "instrument_django_core_handlers_exception" ) _process_module_definition("falcon.api", "newrelic.hooks.framework_falcon", "instrument_falcon_api") _process_module_definition("falcon.app", "newrelic.hooks.framework_falcon", "instrument_falcon_app") _process_module_definition( - "falcon.routing.util", - "newrelic.hooks.framework_falcon", - "instrument_falcon_routing_util", + "falcon.routing.util", "newrelic.hooks.framework_falcon", "instrument_falcon_routing_util" ) - _process_module_definition( - "fastapi.routing", - "newrelic.hooks.framework_fastapi", - "instrument_fastapi_routing", - ) + _process_module_definition("fastapi.routing", "newrelic.hooks.framework_fastapi", "instrument_fastapi_routing") _process_module_definition("flask.app", "newrelic.hooks.framework_flask", "instrument_flask_app") - _process_module_definition( - "flask.templating", - "newrelic.hooks.framework_flask", - "instrument_flask_templating", - ) - _process_module_definition( - "flask.blueprints", - "newrelic.hooks.framework_flask", - "instrument_flask_blueprints", - ) + _process_module_definition("flask.templating", "newrelic.hooks.framework_flask", "instrument_flask_templating") + _process_module_definition("flask.blueprints", "newrelic.hooks.framework_flask", "instrument_flask_blueprints") _process_module_definition("flask.views", "newrelic.hooks.framework_flask", "instrument_flask_views") 
_process_module_definition( - "flask_compress", - "newrelic.hooks.middleware_flask_compress", - "instrument_flask_compress", + "flask_compress", "newrelic.hooks.middleware_flask_compress", "instrument_flask_compress" ) _process_module_definition("flask_restful", "newrelic.hooks.component_flask_rest", "instrument_flask_rest") - _process_module_definition( - "flask_restplus.api", - "newrelic.hooks.component_flask_rest", - "instrument_flask_rest", - ) - _process_module_definition( - "flask_restx.api", - "newrelic.hooks.component_flask_rest", - "instrument_flask_rest", - ) + _process_module_definition("flask_restplus.api", "newrelic.hooks.component_flask_rest", "instrument_flask_rest") + _process_module_definition("flask_restx.api", "newrelic.hooks.component_flask_rest", "instrument_flask_rest") - _process_module_definition( - "graphql_server", - "newrelic.hooks.component_graphqlserver", - "instrument_graphqlserver", - ) + _process_module_definition("graphql_server", "newrelic.hooks.component_graphqlserver", "instrument_graphqlserver") _process_module_definition( "sentry_sdk.integrations.asgi", "newrelic.hooks.component_sentry", "instrument_sentry_sdk_integrations_asgi" @@ -2924,61 +2694,37 @@ def _process_module_builtin_defaults(): _process_module_definition("gluon.contrib.memcache.memcache", "newrelic.hooks.memcache_memcache") _process_module_definition( - "graphene.types.schema", - "newrelic.hooks.framework_graphene", - "instrument_graphene_types_schema", + "graphene.types.schema", "newrelic.hooks.framework_graphene", "instrument_graphene_types_schema" ) + _process_module_definition("graphql.graphql", "newrelic.hooks.framework_graphql", "instrument_graphql") _process_module_definition( - "graphql.graphql", - "newrelic.hooks.framework_graphql", - "instrument_graphql", - ) - _process_module_definition( - "graphql.execution.execute", - "newrelic.hooks.framework_graphql", - "instrument_graphql_execute", + "graphql.execution.execute", "newrelic.hooks.framework_graphql", 
"instrument_graphql_execute" ) _process_module_definition( - "graphql.execution.executor", - "newrelic.hooks.framework_graphql", - "instrument_graphql_execute", + "graphql.execution.executor", "newrelic.hooks.framework_graphql", "instrument_graphql_execute" ) _process_module_definition( - "graphql.execution.middleware", - "newrelic.hooks.framework_graphql", - "instrument_graphql_execution_middleware", + "graphql.execution.middleware", "newrelic.hooks.framework_graphql", "instrument_graphql_execution_middleware" ) _process_module_definition( - "graphql.execution.utils", - "newrelic.hooks.framework_graphql", - "instrument_graphql_execution_utils", + "graphql.execution.utils", "newrelic.hooks.framework_graphql", "instrument_graphql_execution_utils" ) _process_module_definition( - "graphql.error.located_error", - "newrelic.hooks.framework_graphql", - "instrument_graphql_error_located_error", + "graphql.error.located_error", "newrelic.hooks.framework_graphql", "instrument_graphql_error_located_error" ) _process_module_definition( - "graphql.language.parser", - "newrelic.hooks.framework_graphql", - "instrument_graphql_parser", + "graphql.language.parser", "newrelic.hooks.framework_graphql", "instrument_graphql_parser" ) _process_module_definition( - "graphql.validation.validate", - "newrelic.hooks.framework_graphql", - "instrument_graphql_validate", + "graphql.validation.validate", "newrelic.hooks.framework_graphql", "instrument_graphql_validate" ) _process_module_definition( - "graphql.validation.validation", - "newrelic.hooks.framework_graphql", - "instrument_graphql_validate", + "graphql.validation.validation", "newrelic.hooks.framework_graphql", "instrument_graphql_validate" ) _process_module_definition( - "graphql.type.schema", - "newrelic.hooks.framework_graphql", - "instrument_graphql_schema_get_field", + "graphql.type.schema", "newrelic.hooks.framework_graphql", "instrument_graphql_schema_get_field" ) _process_module_definition( @@ -3062,21 +2808,9 @@ def 
_process_module_builtin_defaults(): "instrument_google_cloud_firestore_v1_async_transaction", ) - _process_module_definition( - "ariadne.asgi", - "newrelic.hooks.framework_ariadne", - "instrument_ariadne_asgi", - ) - _process_module_definition( - "ariadne.graphql", - "newrelic.hooks.framework_ariadne", - "instrument_ariadne_execute", - ) - _process_module_definition( - "ariadne.wsgi", - "newrelic.hooks.framework_ariadne", - "instrument_ariadne_wsgi", - ) + _process_module_definition("ariadne.asgi", "newrelic.hooks.framework_ariadne", "instrument_ariadne_asgi") + _process_module_definition("ariadne.graphql", "newrelic.hooks.framework_ariadne", "instrument_ariadne_execute") + _process_module_definition("ariadne.wsgi", "newrelic.hooks.framework_ariadne", "instrument_ariadne_wsgi") _process_module_definition("grpc._channel", "newrelic.hooks.framework_grpc", "instrument_grpc__channel") _process_module_definition("grpc._server", "newrelic.hooks.framework_grpc", "instrument_grpc_server") @@ -3084,35 +2818,19 @@ def _process_module_builtin_defaults(): _process_module_definition("bottle", "newrelic.hooks.framework_bottle", "instrument_bottle") _process_module_definition( - "cherrypy._cpreqbody", - "newrelic.hooks.framework_cherrypy", - "instrument_cherrypy__cpreqbody", - ) - _process_module_definition( - "cherrypy._cprequest", - "newrelic.hooks.framework_cherrypy", - "instrument_cherrypy__cprequest", - ) - _process_module_definition( - "cherrypy._cpdispatch", - "newrelic.hooks.framework_cherrypy", - "instrument_cherrypy__cpdispatch", + "cherrypy._cpreqbody", "newrelic.hooks.framework_cherrypy", "instrument_cherrypy__cpreqbody" ) _process_module_definition( - "cherrypy._cpwsgi", - "newrelic.hooks.framework_cherrypy", - "instrument_cherrypy__cpwsgi", + "cherrypy._cprequest", "newrelic.hooks.framework_cherrypy", "instrument_cherrypy__cprequest" ) _process_module_definition( - "cherrypy._cptree", - "newrelic.hooks.framework_cherrypy", - "instrument_cherrypy__cptree", + 
"cherrypy._cpdispatch", "newrelic.hooks.framework_cherrypy", "instrument_cherrypy__cpdispatch" ) + _process_module_definition("cherrypy._cpwsgi", "newrelic.hooks.framework_cherrypy", "instrument_cherrypy__cpwsgi") + _process_module_definition("cherrypy._cptree", "newrelic.hooks.framework_cherrypy", "instrument_cherrypy__cptree") _process_module_definition( - "confluent_kafka.cimpl", - "newrelic.hooks.messagebroker_confluentkafka", - "instrument_confluentkafka_cimpl", + "confluent_kafka.cimpl", "newrelic.hooks.messagebroker_confluentkafka", "instrument_confluentkafka_cimpl" ) _process_module_definition( "confluent_kafka.serializing_producer", @@ -3126,58 +2844,24 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "kafka.consumer.group", - "newrelic.hooks.messagebroker_kafkapython", - "instrument_kafka_consumer_group", + "kafka.consumer.group", "newrelic.hooks.messagebroker_kafkapython", "instrument_kafka_consumer_group" ) _process_module_definition( - "kafka.producer.kafka", - "newrelic.hooks.messagebroker_kafkapython", - "instrument_kafka_producer", + "kafka.producer.kafka", "newrelic.hooks.messagebroker_kafkapython", "instrument_kafka_producer" ) _process_module_definition( - "kafka.coordinator.heartbeat", - "newrelic.hooks.messagebroker_kafkapython", - "instrument_kafka_heartbeat", + "kafka.coordinator.heartbeat", "newrelic.hooks.messagebroker_kafkapython", "instrument_kafka_heartbeat" ) - _process_module_definition( - "logging", - "newrelic.hooks.logger_logging", - "instrument_logging", - ) + _process_module_definition("logging", "newrelic.hooks.logger_logging", "instrument_logging") - _process_module_definition( - "loguru", - "newrelic.hooks.logger_loguru", - "instrument_loguru", - ) - _process_module_definition( - "loguru._logger", - "newrelic.hooks.logger_loguru", - "instrument_loguru_logger", - ) - _process_module_definition( - "structlog._base", - "newrelic.hooks.logger_structlog", - "instrument_structlog__base", - ) - 
_process_module_definition( - "structlog._frames", - "newrelic.hooks.logger_structlog", - "instrument_structlog__frames", - ) - _process_module_definition( - "paste.httpserver", - "newrelic.hooks.adapter_paste", - "instrument_paste_httpserver", - ) + _process_module_definition("loguru", "newrelic.hooks.logger_loguru", "instrument_loguru") + _process_module_definition("loguru._logger", "newrelic.hooks.logger_loguru", "instrument_loguru_logger") + _process_module_definition("structlog._base", "newrelic.hooks.logger_structlog", "instrument_structlog__base") + _process_module_definition("structlog._frames", "newrelic.hooks.logger_structlog", "instrument_structlog__frames") + _process_module_definition("paste.httpserver", "newrelic.hooks.adapter_paste", "instrument_paste_httpserver") - _process_module_definition( - "gunicorn.app.base", - "newrelic.hooks.adapter_gunicorn", - "instrument_gunicorn_app_base", - ) + _process_module_definition("gunicorn.app.base", "newrelic.hooks.adapter_gunicorn", "instrument_gunicorn_app_base") _process_module_definition("cassandra", "newrelic.hooks.datastore_cassandradriver", "instrument_cassandra") _process_module_definition( @@ -3204,60 +2888,32 @@ def _process_module_builtin_defaults(): _process_module_definition("psycopg2", "newrelic.hooks.database_psycopg2", "instrument_psycopg2") _process_module_definition( - "psycopg2._psycopg2", - "newrelic.hooks.database_psycopg2", - "instrument_psycopg2__psycopg2", - ) - _process_module_definition( - "psycopg2.extensions", - "newrelic.hooks.database_psycopg2", - "instrument_psycopg2_extensions", + "psycopg2._psycopg2", "newrelic.hooks.database_psycopg2", "instrument_psycopg2__psycopg2" ) _process_module_definition( - "psycopg2._json", - "newrelic.hooks.database_psycopg2", - "instrument_psycopg2__json", - ) - _process_module_definition( - "psycopg2._range", - "newrelic.hooks.database_psycopg2", - "instrument_psycopg2__range", + "psycopg2.extensions", "newrelic.hooks.database_psycopg2", 
"instrument_psycopg2_extensions" ) + _process_module_definition("psycopg2._json", "newrelic.hooks.database_psycopg2", "instrument_psycopg2__json") + _process_module_definition("psycopg2._range", "newrelic.hooks.database_psycopg2", "instrument_psycopg2__range") _process_module_definition("psycopg2.sql", "newrelic.hooks.database_psycopg2", "instrument_psycopg2_sql") _process_module_definition("psycopg2ct", "newrelic.hooks.database_psycopg2ct", "instrument_psycopg2ct") _process_module_definition( - "psycopg2ct.extensions", - "newrelic.hooks.database_psycopg2ct", - "instrument_psycopg2ct_extensions", + "psycopg2ct.extensions", "newrelic.hooks.database_psycopg2ct", "instrument_psycopg2ct_extensions" ) + _process_module_definition("psycopg2cffi", "newrelic.hooks.database_psycopg2cffi", "instrument_psycopg2cffi") _process_module_definition( - "psycopg2cffi", - "newrelic.hooks.database_psycopg2cffi", - "instrument_psycopg2cffi", - ) - _process_module_definition( - "psycopg2cffi.extensions", - "newrelic.hooks.database_psycopg2cffi", - "instrument_psycopg2cffi_extensions", + "psycopg2cffi.extensions", "newrelic.hooks.database_psycopg2cffi", "instrument_psycopg2cffi_extensions" ) _process_module_definition( - "asyncpg.connect_utils", - "newrelic.hooks.database_asyncpg", - "instrument_asyncpg_connect_utils", - ) - _process_module_definition( - "asyncpg.protocol", - "newrelic.hooks.database_asyncpg", - "instrument_asyncpg_protocol", + "asyncpg.connect_utils", "newrelic.hooks.database_asyncpg", "instrument_asyncpg_connect_utils" ) + _process_module_definition("asyncpg.protocol", "newrelic.hooks.database_asyncpg", "instrument_asyncpg_protocol") _process_module_definition( - "postgresql.driver.dbapi20", - "newrelic.hooks.database_postgresql", - "instrument_postgresql_driver_dbapi20", + "postgresql.driver.dbapi20", "newrelic.hooks.database_postgresql", "instrument_postgresql_driver_dbapi20" ) _process_module_definition( @@ -3270,33 +2926,17 @@ def 
_process_module_builtin_defaults(): _process_module_definition("sqlite3.dbapi2", "newrelic.hooks.database_sqlite", "instrument_sqlite3_dbapi2") _process_module_definition("pysqlite2", "newrelic.hooks.database_sqlite", "instrument_sqlite3") - _process_module_definition( - "pysqlite2.dbapi2", - "newrelic.hooks.database_sqlite", - "instrument_sqlite3_dbapi2", - ) + _process_module_definition("pysqlite2.dbapi2", "newrelic.hooks.database_sqlite", "instrument_sqlite3_dbapi2") _process_module_definition("memcache", "newrelic.hooks.datastore_memcache", "instrument_memcache") + _process_module_definition("pylibmc.client", "newrelic.hooks.datastore_pylibmc", "instrument_pylibmc_client") _process_module_definition( - "pylibmc.client", - "newrelic.hooks.datastore_pylibmc", - "instrument_pylibmc_client", - ) - _process_module_definition( - "bmemcached.client", - "newrelic.hooks.datastore_bmemcached", - "instrument_bmemcached_client", + "bmemcached.client", "newrelic.hooks.datastore_bmemcached", "instrument_bmemcached_client" ) _process_module_definition( - "pymemcache.client", - "newrelic.hooks.datastore_pymemcache", - "instrument_pymemcache_client", - ) - _process_module_definition( - "aiomcache.client", - "newrelic.hooks.datastore_aiomcache", - "instrument_aiomcache_client", + "pymemcache.client", "newrelic.hooks.datastore_pymemcache", "instrument_pymemcache_client" ) + _process_module_definition("aiomcache.client", "newrelic.hooks.datastore_aiomcache", "instrument_aiomcache_client") _process_module_definition("jinja2.environment", "newrelic.hooks.template_jinja2") @@ -3312,40 +2952,24 @@ def _process_module_builtin_defaults(): _process_module_definition("urllib.request", "newrelic.hooks.external_urllib") _process_module_definition( - "urllib3.connectionpool", - "newrelic.hooks.external_urllib3", - "instrument_urllib3_connectionpool", + "urllib3.connectionpool", "newrelic.hooks.external_urllib3", "instrument_urllib3_connectionpool" ) + 
_process_module_definition("urllib3.connection", "newrelic.hooks.external_urllib3", "instrument_urllib3_connection") _process_module_definition( - "urllib3.connection", - "newrelic.hooks.external_urllib3", - "instrument_urllib3_connection", - ) - _process_module_definition( - "requests.packages.urllib3.connection", - "newrelic.hooks.external_urllib3", - "instrument_urllib3_connection", + "requests.packages.urllib3.connection", "newrelic.hooks.external_urllib3", "instrument_urllib3_connection" ) _process_module_definition( - "starlette.requests", - "newrelic.hooks.framework_starlette", - "instrument_starlette_requests", + "starlette.requests", "newrelic.hooks.framework_starlette", "instrument_starlette_requests" ) _process_module_definition( - "starlette.routing", - "newrelic.hooks.framework_starlette", - "instrument_starlette_routing", + "starlette.routing", "newrelic.hooks.framework_starlette", "instrument_starlette_routing" ) _process_module_definition( - "starlette.applications", - "newrelic.hooks.framework_starlette", - "instrument_starlette_applications", + "starlette.applications", "newrelic.hooks.framework_starlette", "instrument_starlette_applications" ) _process_module_definition( - "starlette.middleware.errors", - "newrelic.hooks.framework_starlette", - "instrument_starlette_middleware_errors", + "starlette.middleware.errors", "newrelic.hooks.framework_starlette", "instrument_starlette_middleware_errors" ) _process_module_definition( "starlette.middleware.exceptions", @@ -3353,31 +2977,19 @@ def _process_module_builtin_defaults(): "instrument_starlette_middleware_exceptions", ) _process_module_definition( - "starlette.exceptions", - "newrelic.hooks.framework_starlette", - "instrument_starlette_exceptions", + "starlette.exceptions", "newrelic.hooks.framework_starlette", "instrument_starlette_exceptions" ) _process_module_definition( - "starlette.background", - "newrelic.hooks.framework_starlette", - "instrument_starlette_background_task", + 
"starlette.background", "newrelic.hooks.framework_starlette", "instrument_starlette_background_task" ) _process_module_definition( - "starlette.concurrency", - "newrelic.hooks.framework_starlette", - "instrument_starlette_concurrency", + "starlette.concurrency", "newrelic.hooks.framework_starlette", "instrument_starlette_concurrency" ) - _process_module_definition( - "strawberry.asgi", - "newrelic.hooks.framework_strawberry", - "instrument_strawberry_asgi", - ) + _process_module_definition("strawberry.asgi", "newrelic.hooks.framework_strawberry", "instrument_strawberry_asgi") _process_module_definition( - "strawberry.schema.schema", - "newrelic.hooks.framework_strawberry", - "instrument_strawberry_schema", + "strawberry.schema.schema", "newrelic.hooks.framework_strawberry", "instrument_strawberry_schema" ) _process_module_definition( @@ -3407,42 +3019,22 @@ def _process_module_builtin_defaults(): _process_module_definition("aiohttp.wsgi", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_wsgi") _process_module_definition("aiohttp.web", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_web") _process_module_definition( - "aiohttp.web_reqrep", - "newrelic.hooks.framework_aiohttp", - "instrument_aiohttp_web_response", + "aiohttp.web_reqrep", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_web_response" ) _process_module_definition( - "aiohttp.web_response", - "newrelic.hooks.framework_aiohttp", - "instrument_aiohttp_web_response", + "aiohttp.web_response", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_web_response" ) _process_module_definition( - "aiohttp.web_urldispatcher", - "newrelic.hooks.framework_aiohttp", - "instrument_aiohttp_web_urldispatcher", + "aiohttp.web_urldispatcher", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_web_urldispatcher" ) + _process_module_definition("aiohttp.client", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_client") _process_module_definition( - "aiohttp.client", - 
"newrelic.hooks.framework_aiohttp", - "instrument_aiohttp_client", - ) - _process_module_definition( - "aiohttp.client_reqrep", - "newrelic.hooks.framework_aiohttp", - "instrument_aiohttp_client_reqrep", - ) - _process_module_definition( - "aiohttp.protocol", - "newrelic.hooks.framework_aiohttp", - "instrument_aiohttp_protocol", + "aiohttp.client_reqrep", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_client_reqrep" ) + _process_module_definition("aiohttp.protocol", "newrelic.hooks.framework_aiohttp", "instrument_aiohttp_protocol") _process_module_definition("requests.api", "newrelic.hooks.external_requests", "instrument_requests_api") - _process_module_definition( - "requests.sessions", - "newrelic.hooks.external_requests", - "instrument_requests_sessions", - ) + _process_module_definition("requests.sessions", "newrelic.hooks.external_requests", "instrument_requests_sessions") _process_module_definition("feedparser", "newrelic.hooks.external_feedparser") @@ -3458,11 +3050,7 @@ def _process_module_builtin_defaults(): _process_module_definition("aredis.client", "newrelic.hooks.datastore_aredis", "instrument_aredis_client") - _process_module_definition( - "aredis.connection", - "newrelic.hooks.datastore_aredis", - "instrument_aredis_connection", - ) + _process_module_definition("aredis.connection", "newrelic.hooks.datastore_aredis", "instrument_aredis_connection") _process_module_definition("aioredis.client", "newrelic.hooks.datastore_aioredis", "instrument_aioredis_client") @@ -3474,22 +3062,29 @@ def _process_module_builtin_defaults(): # v7 and below _process_module_definition( - "elasticsearch.client", - "newrelic.hooks.datastore_elasticsearch", - "instrument_elasticsearch_client", + "elasticsearch.client", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client" + ) + _process_module_definition( + "elasticsearch._async.client", "newrelic.hooks.datastore_elasticsearch", "instrument_async_elasticsearch_client" ) # v8 and above 
_process_module_definition( - "elasticsearch._sync.client", + "elasticsearch._sync.client", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_v8" + ) + _process_module_definition( + "elasticsearch._async.client", "newrelic.hooks.datastore_elasticsearch", - "instrument_elasticsearch_client_v8", + "instrument_async_elasticsearch_client_v8", ) # v7 and below _process_module_definition( - "elasticsearch.client.cat", + "elasticsearch.client.cat", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cat" + ) + _process_module_definition( + "elasticsearch._async.client.cat", "newrelic.hooks.datastore_elasticsearch", - "instrument_elasticsearch_client_cat", + "instrument_async_elasticsearch_client_cat", ) # v8 and above _process_module_definition( @@ -3498,18 +3093,34 @@ def _process_module_builtin_defaults(): "instrument_elasticsearch_client_cat_v8", ) + _process_module_definition( + "elasticsearch._async.client.cat", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_cat_v8", + ) + # v7 and below _process_module_definition( "elasticsearch.client.cluster", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cluster", ) + _process_module_definition( + "elasticsearch._async.client.cluster", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_cluster", + ) # v8 and above _process_module_definition( "elasticsearch._sync.client.cluster", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_cluster_v8", ) + _process_module_definition( + "elasticsearch._async.client.cluster", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_cluster_v8", + ) # v7 and below _process_module_definition( @@ -3517,18 +3128,31 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_indices", ) + _process_module_definition( + 
"elasticsearch._async.client.indices", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_indices", + ) # v8 and above _process_module_definition( "elasticsearch._sync.client.indices", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_indices_v8", ) + _process_module_definition( + "elasticsearch._async.client.indices", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_indices_v8", + ) # v7 and below _process_module_definition( - "elasticsearch.client.nodes", + "elasticsearch.client.nodes", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_nodes" + ) + _process_module_definition( + "elasticsearch._async.client.nodes", "newrelic.hooks.datastore_elasticsearch", - "instrument_elasticsearch_client_nodes", + "instrument_async_elasticsearch_client_nodes", ) # v8 and above _process_module_definition( @@ -3536,6 +3160,11 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_nodes_v8", ) + _process_module_definition( + "elasticsearch._async.client.nodes", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_nodes_v8", + ) # v7 and below _process_module_definition( @@ -3543,18 +3172,31 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_snapshot", ) + _process_module_definition( + "elasticsearch._async.client.snapshot", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_snapshot", + ) # v8 and above _process_module_definition( "elasticsearch._sync.client.snapshot", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_snapshot_v8", ) + _process_module_definition( + "elasticsearch._async.client.snapshot", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_snapshot_v8", + ) # v7 and below 
_process_module_definition( - "elasticsearch.client.tasks", + "elasticsearch.client.tasks", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_tasks" + ) + _process_module_definition( + "elasticsearch._async.client.tasks", "newrelic.hooks.datastore_elasticsearch", - "instrument_elasticsearch_client_tasks", + "instrument_async_elasticsearch_client_tasks", ) # v8 and above _process_module_definition( @@ -3562,6 +3204,11 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_tasks_v8", ) + _process_module_definition( + "elasticsearch._async.client.tasks", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_tasks_v8", + ) # v7 and below _process_module_definition( @@ -3569,12 +3216,22 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_ingest", ) + _process_module_definition( + "elasticsearch._async.client.ingest", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_ingest", + ) # v8 and above _process_module_definition( "elasticsearch._sync.client.ingest", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_client_ingest_v8", ) + _process_module_definition( + "elasticsearch._async.client.ingest", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_client_ingest_v8", + ) # v7 and below _process_module_definition( @@ -3582,18 +3239,31 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_connection_base", ) + _process_module_definition( + "elasticsearch._async.http_aiohttp", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elasticsearch_connection_base", + ) # v8 and above _process_module_definition( "elastic_transport._node._base", "newrelic.hooks.datastore_elasticsearch", "instrument_elastic_transport__node__base", ) + 
_process_module_definition( + "elastic_transport._node._base_async", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elastic_transport__node__base", + ) # v7 and below _process_module_definition( - "elasticsearch.transport", + "elasticsearch.transport", "newrelic.hooks.datastore_elasticsearch", "instrument_elasticsearch_transport" + ) + _process_module_definition( + "elasticsearch._async.transport", "newrelic.hooks.datastore_elasticsearch", - "instrument_elasticsearch_transport", + "instrument_async_elasticsearch_transport", ) # v8 and above _process_module_definition( @@ -3601,27 +3271,26 @@ def _process_module_builtin_defaults(): "newrelic.hooks.datastore_elasticsearch", "instrument_elastic_transport__transport", ) + _process_module_definition( + "elastic_transport._async_transport", + "newrelic.hooks.datastore_elasticsearch", + "instrument_async_elastic_transport__transport", + ) _process_module_definition("pika.adapters", "newrelic.hooks.messagebroker_pika", "instrument_pika_adapters") _process_module_definition("pika.channel", "newrelic.hooks.messagebroker_pika", "instrument_pika_channel") _process_module_definition("pika.spec", "newrelic.hooks.messagebroker_pika", "instrument_pika_spec") _process_module_definition( - "pyelasticsearch.client", - "newrelic.hooks.datastore_pyelasticsearch", - "instrument_pyelasticsearch_client", + "pyelasticsearch.client", "newrelic.hooks.datastore_pyelasticsearch", "instrument_pyelasticsearch_client" ) # Newer pymongo module locations _process_module_definition( - "pymongo.synchronous.pool", - "newrelic.hooks.datastore_pymongo", - "instrument_pymongo_synchronous_pool", + "pymongo.synchronous.pool", "newrelic.hooks.datastore_pymongo", "instrument_pymongo_synchronous_pool" ) _process_module_definition( - "pymongo.asynchronous.pool", - "newrelic.hooks.datastore_pymongo", - "instrument_pymongo_asynchronous_pool", + "pymongo.asynchronous.pool", "newrelic.hooks.datastore_pymongo", 
"instrument_pymongo_asynchronous_pool" ) _process_module_definition( @@ -3648,19 +3317,13 @@ def _process_module_builtin_defaults(): # Older pymongo module locations _process_module_definition( - "pymongo.connection", - "newrelic.hooks.datastore_pymongo", - "instrument_pymongo_synchronous_pool", + "pymongo.connection", "newrelic.hooks.datastore_pymongo", "instrument_pymongo_synchronous_pool" ) _process_module_definition( - "pymongo.collection", - "newrelic.hooks.datastore_pymongo", - "instrument_pymongo_synchronous_collection", + "pymongo.collection", "newrelic.hooks.datastore_pymongo", "instrument_pymongo_synchronous_collection" ) _process_module_definition( - "pymongo.mongo_client", - "newrelic.hooks.datastore_pymongo", - "instrument_pymongo_synchronous_mongo_client", + "pymongo.mongo_client", "newrelic.hooks.datastore_pymongo", "instrument_pymongo_synchronous_mongo_client" ) # Redis v4.2+ @@ -3678,11 +3341,7 @@ def _process_module_builtin_defaults(): "redis.asyncio.connection", "newrelic.hooks.datastore_redis", "instrument_asyncio_redis_connection" ) - _process_module_definition( - "redis.connection", - "newrelic.hooks.datastore_redis", - "instrument_redis_connection", - ) + _process_module_definition("redis.connection", "newrelic.hooks.datastore_redis", "instrument_redis_connection") _process_module_definition("redis.client", "newrelic.hooks.datastore_redis", "instrument_redis_client") _process_module_definition( @@ -3731,11 +3390,7 @@ def _process_module_builtin_defaults(): "valkey.asyncio.connection", "newrelic.hooks.datastore_valkey", "instrument_asyncio_valkey_connection" ) - _process_module_definition( - "valkey.connection", - "newrelic.hooks.datastore_valkey", - "instrument_valkey_connection", - ) + _process_module_definition("valkey.connection", "newrelic.hooks.datastore_valkey", "instrument_valkey_connection") _process_module_definition("valkey.client", "newrelic.hooks.datastore_valkey", "instrument_valkey_client") _process_module_definition( @@ 
-3781,48 +3436,28 @@ def _process_module_builtin_defaults(): "motor.motor_tornado", "newrelic.hooks.datastore_motor", "instrument_motor_motor_tornado" ) - _process_module_definition( - "piston.resource", - "newrelic.hooks.component_piston", - "instrument_piston_resource", - ) + _process_module_definition("piston.resource", "newrelic.hooks.component_piston", "instrument_piston_resource") _process_module_definition("piston.doc", "newrelic.hooks.component_piston", "instrument_piston_doc") _process_module_definition( - "tastypie.resources", - "newrelic.hooks.component_tastypie", - "instrument_tastypie_resources", + "tastypie.resources", "newrelic.hooks.component_tastypie", "instrument_tastypie_resources" ) _process_module_definition("tastypie.api", "newrelic.hooks.component_tastypie", "instrument_tastypie_api") - _process_module_definition( - "sklearn.metrics", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_metrics", - ) + _process_module_definition("sklearn.metrics", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_metrics") _process_module_definition( - "sklearn.tree._classes", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_tree_models", + "sklearn.tree._classes", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_tree_models" ) # In scikit-learn < 0.21 the model classes are in tree.py instead of _classes.py. 
- _process_module_definition( - "sklearn.tree.tree", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_tree_models", - ) + _process_module_definition("sklearn.tree.tree", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_tree_models") _process_module_definition( - "sklearn.compose._column_transformer", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_compose_models", + "sklearn.compose._column_transformer", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_compose_models" ) _process_module_definition( - "sklearn.compose._target", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_compose_models", + "sklearn.compose._target", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_compose_models" ) _process_module_definition( @@ -3874,9 +3509,7 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.covariance.elliptic_envelope", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_covariance_models", + "sklearn.covariance.elliptic_envelope", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_covariance_models" ) _process_module_definition( @@ -3886,39 +3519,27 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.ensemble._bagging", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_bagging_models", + "sklearn.ensemble._bagging", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_bagging_models" ) _process_module_definition( - "sklearn.ensemble.bagging", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_bagging_models", + "sklearn.ensemble.bagging", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_bagging_models" ) _process_module_definition( - "sklearn.ensemble._forest", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_forest_models", + "sklearn.ensemble._forest", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_forest_models" ) _process_module_definition( - 
"sklearn.ensemble.forest", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_forest_models", + "sklearn.ensemble.forest", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_forest_models" ) _process_module_definition( - "sklearn.ensemble._iforest", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_iforest_models", + "sklearn.ensemble._iforest", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_iforest_models" ) _process_module_definition( - "sklearn.ensemble.iforest", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_iforest_models", + "sklearn.ensemble.iforest", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_iforest_models" ) _process_module_definition( @@ -3934,9 +3555,7 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.ensemble._gb", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_gradient_boosting_models", + "sklearn.ensemble._gb", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_gradient_boosting_models" ) _process_module_definition( @@ -3946,9 +3565,7 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.ensemble._voting", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_voting_models", + "sklearn.ensemble._voting", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_voting_models" ) _process_module_definition( @@ -3958,9 +3575,7 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.ensemble._stacking", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_ensemble_stacking_models", + "sklearn.ensemble._stacking", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_ensemble_stacking_models" ) _process_module_definition( @@ -3970,27 +3585,19 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.linear_model._base", - "newrelic.hooks.mlmodel_sklearn", - 
"instrument_sklearn_linear_models", + "sklearn.linear_model._base", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model.base", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model.base", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model._bayes", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_bayes_models", + "sklearn.linear_model._bayes", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_bayes_models" ) _process_module_definition( - "sklearn.linear_model.bayes", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_bayes_models", + "sklearn.linear_model.bayes", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_bayes_models" ) _process_module_definition( @@ -4018,21 +3625,15 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.linear_model._glm", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_GLM_models", + "sklearn.linear_model._glm", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_GLM_models" ) _process_module_definition( - "sklearn.linear_model._huber", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model._huber", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model.huber", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model.huber", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( @@ -4048,39 +3649,27 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.linear_model._ridge", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_ridge_models", + "sklearn.linear_model._ridge", "newrelic.hooks.mlmodel_sklearn", 
"instrument_sklearn_linear_ridge_models" ) _process_module_definition( - "sklearn.linear_model.ridge", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_ridge_models", + "sklearn.linear_model.ridge", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_ridge_models" ) _process_module_definition( - "sklearn.linear_model._logistic", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_logistic_models", + "sklearn.linear_model._logistic", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_logistic_models" ) _process_module_definition( - "sklearn.linear_model.logistic", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_logistic_models", + "sklearn.linear_model.logistic", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_logistic_models" ) _process_module_definition( - "sklearn.linear_model._omp", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_OMP_models", + "sklearn.linear_model._omp", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_OMP_models" ) _process_module_definition( - "sklearn.linear_model.omp", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_OMP_models", + "sklearn.linear_model.omp", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_OMP_models" ) _process_module_definition( @@ -4096,45 +3685,31 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.linear_model._perceptron", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model._perceptron", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model.perceptron", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model.perceptron", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model._quantile", - "newrelic.hooks.mlmodel_sklearn", - 
"instrument_sklearn_linear_models", + "sklearn.linear_model._quantile", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model._ransac", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model._ransac", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model.ransac", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model.ransac", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model._theil_sen", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model._theil_sen", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( - "sklearn.linear_model.theil_sen", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_linear_models", + "sklearn.linear_model.theil_sen", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_linear_models" ) _process_module_definition( @@ -4156,34 +3731,22 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.gaussian_process._gpc", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_gaussian_process_models", + "sklearn.gaussian_process._gpc", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_gaussian_process_models" ) _process_module_definition( - "sklearn.gaussian_process.gpc", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_gaussian_process_models", + "sklearn.gaussian_process.gpc", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_gaussian_process_models" ) _process_module_definition( - "sklearn.gaussian_process._gpr", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_gaussian_process_models", + "sklearn.gaussian_process._gpr", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_gaussian_process_models" ) 
_process_module_definition( - "sklearn.gaussian_process.gpr", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_gaussian_process_models", + "sklearn.gaussian_process.gpr", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_gaussian_process_models" ) - _process_module_definition( - "sklearn.dummy", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_dummy_models", - ) + _process_module_definition("sklearn.dummy", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_dummy_models") _process_module_definition( "sklearn.feature_selection._rfe", @@ -4228,9 +3791,7 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.kernel_ridge", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_kernel_ridge_models", + "sklearn.kernel_ridge", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_kernel_ridge_models" ) _process_module_definition( @@ -4246,33 +3807,23 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.neural_network._rbm", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neural_network_models", + "sklearn.neural_network._rbm", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neural_network_models" ) _process_module_definition( - "sklearn.neural_network.rbm", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neural_network_models", + "sklearn.neural_network.rbm", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neural_network_models" ) _process_module_definition( - "sklearn.calibration", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_calibration_models", + "sklearn.calibration", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_calibration_models" ) _process_module_definition( - "sklearn.cluster._affinity_propagation", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster._affinity_propagation", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - 
"sklearn.cluster.affinity_propagation_", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster.affinity_propagation_", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( @@ -4288,147 +3839,99 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.cluster._birch", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster._birch", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster.birch", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster.birch", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster._bisect_k_means", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_kmeans_models", + "sklearn.cluster._bisect_k_means", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_kmeans_models" ) _process_module_definition( - "sklearn.cluster._dbscan", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster._dbscan", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster.dbscan_", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster.dbscan_", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster._feature_agglomeration", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster._feature_agglomeration", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster._kmeans", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_kmeans_models", + "sklearn.cluster._kmeans", "newrelic.hooks.mlmodel_sklearn", 
"instrument_sklearn_cluster_kmeans_models" ) _process_module_definition( - "sklearn.cluster.k_means_", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_kmeans_models", + "sklearn.cluster.k_means_", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_kmeans_models" ) _process_module_definition( - "sklearn.cluster._mean_shift", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster._mean_shift", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster.mean_shift_", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster.mean_shift_", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster._optics", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_models", + "sklearn.cluster._optics", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_models" ) _process_module_definition( - "sklearn.cluster._spectral", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_clustering_models", + "sklearn.cluster._spectral", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_clustering_models" ) _process_module_definition( - "sklearn.cluster.spectral", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_clustering_models", + "sklearn.cluster.spectral", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_clustering_models" ) _process_module_definition( - "sklearn.cluster._bicluster", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_clustering_models", + "sklearn.cluster._bicluster", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_cluster_clustering_models" ) _process_module_definition( - "sklearn.cluster.bicluster", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_cluster_clustering_models", + "sklearn.cluster.bicluster", "newrelic.hooks.mlmodel_sklearn", 
"instrument_sklearn_cluster_clustering_models" ) _process_module_definition( - "sklearn.multiclass", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_multiclass_models", + "sklearn.multiclass", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_multiclass_models" ) _process_module_definition( - "sklearn.multioutput", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_multioutput_models", + "sklearn.multioutput", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_multioutput_models" ) _process_module_definition( - "sklearn.naive_bayes", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_naive_bayes_models", + "sklearn.naive_bayes", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_naive_bayes_models" ) _process_module_definition( - "sklearn.model_selection._search", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_model_selection_models", + "sklearn.model_selection._search", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_model_selection_models" ) _process_module_definition( - "sklearn.mixture._bayesian_mixture", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_mixture_models", + "sklearn.mixture._bayesian_mixture", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_mixture_models" ) _process_module_definition( - "sklearn.mixture.bayesian_mixture", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_mixture_models", + "sklearn.mixture.bayesian_mixture", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_mixture_models" ) _process_module_definition( - "sklearn.mixture._gaussian_mixture", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_mixture_models", + "sklearn.mixture._gaussian_mixture", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_mixture_models" ) _process_module_definition( - "sklearn.mixture.gaussian_mixture", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_mixture_models", + "sklearn.mixture.gaussian_mixture", "newrelic.hooks.mlmodel_sklearn", 
"instrument_sklearn_mixture_models" ) _process_module_definition( - "sklearn.pipeline", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_pipeline_models", + "sklearn.pipeline", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_pipeline_models" ) _process_module_definition( @@ -4450,16 +3953,10 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.svm._classes", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_svm_models", + "sklearn.svm._classes", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_svm_models" ) - _process_module_definition( - "sklearn.svm.classes", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_svm_models", - ) + _process_module_definition("sklearn.svm.classes", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_svm_models") _process_module_definition( "sklearn.neighbors._classification", @@ -4474,81 +3971,55 @@ def _process_module_builtin_defaults(): ) _process_module_definition( - "sklearn.neighbors._graph", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_KRadius_models", + "sklearn.neighbors._graph", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_KRadius_models" ) _process_module_definition( - "sklearn.neighbors._kde", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors._kde", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors.kde", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors.kde", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors._lof", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors._lof", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors.lof", - 
"newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors.lof", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors._nca", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors._nca", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors._nearest_centroid", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors._nearest_centroid", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors.nearest_centroid", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors.nearest_centroid", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors._regression", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_KRadius_models", + "sklearn.neighbors._regression", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_KRadius_models" ) _process_module_definition( - "sklearn.neighbors.regression", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_KRadius_models", + "sklearn.neighbors.regression", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_KRadius_models" ) _process_module_definition( - "sklearn.neighbors._unsupervised", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors._unsupervised", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - "sklearn.neighbors.unsupervised", - "newrelic.hooks.mlmodel_sklearn", - "instrument_sklearn_neighbors_models", + "sklearn.neighbors.unsupervised", "newrelic.hooks.mlmodel_sklearn", "instrument_sklearn_neighbors_models" ) _process_module_definition( - 
"rest_framework.views", - "newrelic.hooks.component_djangorestframework", - "instrument_rest_framework_views", + "rest_framework.views", "newrelic.hooks.component_djangorestframework", "instrument_rest_framework_views" ) _process_module_definition( "rest_framework.decorators", @@ -4556,180 +4027,86 @@ def _process_module_builtin_defaults(): "instrument_rest_framework_decorators", ) - _process_module_definition( - "celery.task.base", - "newrelic.hooks.application_celery", - "instrument_celery_app_task", - ) - _process_module_definition( - "celery.app.task", - "newrelic.hooks.application_celery", - "instrument_celery_app_task", - ) - _process_module_definition( - "celery.app.trace", - "newrelic.hooks.application_celery", - "instrument_celery_app_trace", - ) + _process_module_definition("celery.task.base", "newrelic.hooks.application_celery", "instrument_celery_app_task") + _process_module_definition("celery.app.task", "newrelic.hooks.application_celery", "instrument_celery_app_task") + _process_module_definition("celery.app.trace", "newrelic.hooks.application_celery", "instrument_celery_app_trace") _process_module_definition("celery.worker", "newrelic.hooks.application_celery", "instrument_celery_worker") _process_module_definition( - "celery.concurrency.processes", - "newrelic.hooks.application_celery", - "instrument_celery_worker", + "celery.concurrency.processes", "newrelic.hooks.application_celery", "instrument_celery_worker" ) _process_module_definition( - "celery.concurrency.prefork", - "newrelic.hooks.application_celery", - "instrument_celery_worker", + "celery.concurrency.prefork", "newrelic.hooks.application_celery", "instrument_celery_worker" ) - _process_module_definition( - "celery.app.base", - "newrelic.hooks.application_celery", - "instrument_celery_app_base", - ) + _process_module_definition("celery.app.base", "newrelic.hooks.application_celery", "instrument_celery_app_base") _process_module_definition("billiard.pool", 
"newrelic.hooks.application_celery", "instrument_billiard_pool") _process_module_definition("flup.server.cgi", "newrelic.hooks.adapter_flup", "instrument_flup_server_cgi") + _process_module_definition("flup.server.ajp_base", "newrelic.hooks.adapter_flup", "instrument_flup_server_ajp_base") _process_module_definition( - "flup.server.ajp_base", - "newrelic.hooks.adapter_flup", - "instrument_flup_server_ajp_base", + "flup.server.fcgi_base", "newrelic.hooks.adapter_flup", "instrument_flup_server_fcgi_base" ) _process_module_definition( - "flup.server.fcgi_base", - "newrelic.hooks.adapter_flup", - "instrument_flup_server_fcgi_base", - ) - _process_module_definition( - "flup.server.scgi_base", - "newrelic.hooks.adapter_flup", - "instrument_flup_server_scgi_base", + "flup.server.scgi_base", "newrelic.hooks.adapter_flup", "instrument_flup_server_scgi_base" ) - _process_module_definition( - "meinheld.server", - "newrelic.hooks.adapter_meinheld", - "instrument_meinheld_server", - ) + _process_module_definition("meinheld.server", "newrelic.hooks.adapter_meinheld", "instrument_meinheld_server") - _process_module_definition( - "waitress.server", - "newrelic.hooks.adapter_waitress", - "instrument_waitress_server", - ) + _process_module_definition("waitress.server", "newrelic.hooks.adapter_waitress", "instrument_waitress_server") _process_module_definition("gevent.wsgi", "newrelic.hooks.adapter_gevent", "instrument_gevent_wsgi") _process_module_definition("gevent.pywsgi", "newrelic.hooks.adapter_gevent", "instrument_gevent_pywsgi") _process_module_definition( - "wsgiref.simple_server", - "newrelic.hooks.adapter_wsgiref", - "instrument_wsgiref_simple_server", + "wsgiref.simple_server", "newrelic.hooks.adapter_wsgiref", "instrument_wsgiref_simple_server" ) _process_module_definition( - "cherrypy.wsgiserver", - "newrelic.hooks.adapter_cherrypy", - "instrument_cherrypy_wsgiserver", + "cherrypy.wsgiserver", "newrelic.hooks.adapter_cherrypy", "instrument_cherrypy_wsgiserver" ) - 
_process_module_definition( - "cheroot.wsgi", - "newrelic.hooks.adapter_cheroot", - "instrument_cheroot_wsgiserver", - ) + _process_module_definition("cheroot.wsgi", "newrelic.hooks.adapter_cheroot", "instrument_cheroot_wsgiserver") + _process_module_definition("pyramid.router", "newrelic.hooks.framework_pyramid", "instrument_pyramid_router") + _process_module_definition("pyramid.config", "newrelic.hooks.framework_pyramid", "instrument_pyramid_config_views") _process_module_definition( - "pyramid.router", - "newrelic.hooks.framework_pyramid", - "instrument_pyramid_router", + "pyramid.config.views", "newrelic.hooks.framework_pyramid", "instrument_pyramid_config_views" ) _process_module_definition( - "pyramid.config", - "newrelic.hooks.framework_pyramid", - "instrument_pyramid_config_views", - ) - _process_module_definition( - "pyramid.config.views", - "newrelic.hooks.framework_pyramid", - "instrument_pyramid_config_views", - ) - _process_module_definition( - "pyramid.config.tweens", - "newrelic.hooks.framework_pyramid", - "instrument_pyramid_config_tweens", + "pyramid.config.tweens", "newrelic.hooks.framework_pyramid", "instrument_pyramid_config_tweens" ) - _process_module_definition( - "cornice.service", - "newrelic.hooks.component_cornice", - "instrument_cornice_service", - ) + _process_module_definition("cornice.service", "newrelic.hooks.component_cornice", "instrument_cornice_service") _process_module_definition("gevent.monkey", "newrelic.hooks.coroutines_gevent", "instrument_gevent_monkey") _process_module_definition("thrift.transport.TSocket", "newrelic.hooks.external_thrift") + _process_module_definition("gearman.client", "newrelic.hooks.application_gearman", "instrument_gearman_client") _process_module_definition( - "gearman.client", - "newrelic.hooks.application_gearman", - "instrument_gearman_client", - ) - _process_module_definition( - "gearman.connection_manager", - "newrelic.hooks.application_gearman", - "instrument_gearman_connection_manager", - ) - 
_process_module_definition( - "gearman.worker", - "newrelic.hooks.application_gearman", - "instrument_gearman_worker", + "gearman.connection_manager", "newrelic.hooks.application_gearman", "instrument_gearman_connection_manager" ) + _process_module_definition("gearman.worker", "newrelic.hooks.application_gearman", "instrument_gearman_worker") _process_module_definition( - "aiobotocore.endpoint", - "newrelic.hooks.external_aiobotocore", - "instrument_aiobotocore_endpoint", + "aiobotocore.endpoint", "newrelic.hooks.external_aiobotocore", "instrument_aiobotocore_endpoint" ) - _process_module_definition( - "botocore.endpoint", - "newrelic.hooks.external_botocore", - "instrument_botocore_endpoint", - ) - _process_module_definition( - "botocore.client", - "newrelic.hooks.external_botocore", - "instrument_botocore_client", - ) + _process_module_definition("botocore.endpoint", "newrelic.hooks.external_botocore", "instrument_botocore_endpoint") + _process_module_definition("botocore.client", "newrelic.hooks.external_botocore", "instrument_botocore_client") _process_module_definition( - "s3transfer.futures", - "newrelic.hooks.external_s3transfer", - "instrument_s3transfer_futures", + "s3transfer.futures", "newrelic.hooks.external_s3transfer", "instrument_s3transfer_futures" ) _process_module_definition( - "tornado.httpserver", - "newrelic.hooks.framework_tornado", - "instrument_tornado_httpserver", - ) - _process_module_definition( - "tornado.httputil", - "newrelic.hooks.framework_tornado", - "instrument_tornado_httputil", - ) - _process_module_definition( - "tornado.httpclient", - "newrelic.hooks.framework_tornado", - "instrument_tornado_httpclient", + "tornado.httpserver", "newrelic.hooks.framework_tornado", "instrument_tornado_httpserver" ) + _process_module_definition("tornado.httputil", "newrelic.hooks.framework_tornado", "instrument_tornado_httputil") _process_module_definition( - "tornado.routing", - "newrelic.hooks.framework_tornado", - "instrument_tornado_routing", 
+ "tornado.httpclient", "newrelic.hooks.framework_tornado", "instrument_tornado_httpclient" ) + _process_module_definition("tornado.routing", "newrelic.hooks.framework_tornado", "instrument_tornado_routing") _process_module_definition("tornado.web", "newrelic.hooks.framework_tornado", "instrument_tornado_web") @@ -4858,13 +4235,7 @@ def _setup_agent_control_health(): agent_control_health_thread.start() -def initialize( - config_file=None, - environment=None, - ignore_errors=None, - log_file=None, - log_level=None, -): +def initialize(config_file=None, environment=None, ignore_errors=None, log_file=None, log_level=None): agent_control_health.start_time_unix_nano = time.time_ns() if config_file is None: diff --git a/newrelic/hooks/datastore_elasticsearch.py b/newrelic/hooks/datastore_elasticsearch.py index 980385a1b..10f70f645 100644 --- a/newrelic/hooks/datastore_elasticsearch.py +++ b/newrelic/hooks/datastore_elasticsearch.py @@ -11,8 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from newrelic.common.async_wrapper import coroutine_wrapper -from newrelic.api.datastore_trace import DatastoreTrace +from newrelic.api.datastore_trace import DatastoreTrace, DatastoreTraceWrapper from newrelic.api.transaction import current_transaction from newrelic.common.object_wrapper import function_wrapper, wrap_function_wrapper from newrelic.common.package_version_utils import get_package_version_tuple @@ -47,7 +48,7 @@ def _extract_args_allocation_explain_index( include_yes_decisions=None, index=None, *args, - **kwargs + **kwargs, ): return _index_name(index) @@ -106,6 +107,12 @@ def instrument_es_methods(module, _class, client_methods, prefix=None): wrap_elasticsearch_client_method(module, _class, method_name, arg_extractor, prefix) +def instrument_async_es_methods(module, _class, client_methods, prefix=None): + for method_name, arg_extractor in client_methods: + if hasattr(getattr(module, _class), method_name): + wrap_async_elasticsearch_client_method(module, _class, method_name, arg_extractor, prefix) + + def wrap_elasticsearch_client_method(module, class_name, method_name, arg_extractor, prefix=None): def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): transaction = current_transaction() @@ -145,6 +152,42 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): wrap_function_wrapper(module, f"{class_name}.{method_name}", _nr_wrapper_Elasticsearch_method_) +def wrap_async_elasticsearch_client_method(module, class_name, method_name, arg_extractor, prefix=None): + async def _nr_wrapper_AsyncElasticsearch_method_(wrapped, instance, args, kwargs): + transaction = current_transaction() + + if transaction is None: + return await wrapped(*args, **kwargs) + + if arg_extractor is None: + index = None + else: + index = arg_extractor(*args, **kwargs) + + if prefix: + operation = f"{prefix}.{method_name}" + else: + operation = method_name + + transaction._nr_datastore_instance_info = (None, None, None) + + dt = 
DatastoreTrace(product="Elasticsearch", target=index, operation=operation, source=wrapped) + + with dt: + result = await wrapped(*args, **kwargs) + + instance_info = transaction._nr_datastore_instance_info + host, port_path_or_id, _ = instance_info + + dt.host = host + dt.port_path_or_id = port_path_or_id + + return result + + wrapped = coroutine_wrapper + wrap_function_wrapper(module, f"{class_name}.{method_name}", _nr_wrapper_AsyncElasticsearch_method_) + + _elasticsearch_client_methods_below_v8 = ( ("abort_benchmark", None), ("benchmark", _extract_args_index), @@ -186,6 +229,7 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): ("update", _extract_args_index), ) +_async_elasticsearch_client_methods_below_v8 = tuple(_elasticsearch_client_methods_below_v8) _elasticsearch_client_methods_v8 = ( ("bulk", _extract_args_operations_index), @@ -235,6 +279,8 @@ def _nr_wrapper_Elasticsearch_method_(wrapped, instance, args, kwargs): ("update_by_query_rethrottle", None), ) +_async_elasticsearch_client_methods_v8 = tuple(_elasticsearch_client_methods_v8) + def instrument_elasticsearch_client(module): # The module path was remapped in v8 to match previous versions. @@ -244,10 +290,22 @@ def instrument_elasticsearch_client(module): instrument_es_methods(module, "Elasticsearch", _elasticsearch_client_methods_below_v8) +def instrument_async_elasticsearch_client(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods(module, "AsyncElasticsearch", _async_elasticsearch_client_methods_below_v8) + + def instrument_elasticsearch_client_v8(module): instrument_es_methods(module, "Elasticsearch", _elasticsearch_client_methods_v8) +def instrument_async_elasticsearch_client_v8(module): + instrument_async_es_methods(module, "AsyncElasticsearch", _async_elasticsearch_client_methods_v8) + + _elasticsearch_client_indices_methods_below_v8 = ( ("analyze", _extract_args_index), ("clear_cache", _extract_args_index), @@ -290,6 +348,7 @@ def instrument_elasticsearch_client_v8(module): ("validate_query", _extract_args_index), ) +_async_elasticsearch_client_indices_methods_below_v8 = tuple(_elasticsearch_client_indices_methods_below_v8) _elasticsearch_client_indices_methods_v8 = ( ("add_block", _extract_args_index), @@ -348,6 +407,8 @@ def instrument_elasticsearch_client_v8(module): ("validate_query", _extract_args_index), ) +_async_elasticsearch_client_indices_methods_v8 = tuple(_elasticsearch_client_indices_methods_v8) + def instrument_elasticsearch_client_indices(module): # The module path was remapped in v8 to match previous versions. @@ -357,10 +418,22 @@ def instrument_elasticsearch_client_indices(module): instrument_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_below_v8, "indices") +def instrument_async_elasticsearch_client_indices(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_below_v8, "indices") + + def instrument_elasticsearch_client_indices_v8(module): instrument_es_methods(module, "IndicesClient", _elasticsearch_client_indices_methods_v8, "indices") +def instrument_async_elasticsearch_client_indices_v8(module): + instrument_async_es_methods(module, "IndicesClient", _async_elasticsearch_client_indices_methods_v8, "indices") + + _elasticsearch_client_cat_methods_below_v8 = ( ("aliases", None), ("allocation", None), @@ -379,6 +452,8 @@ def instrument_elasticsearch_client_indices_v8(module): ("thread_pool", None), ) +_async_elasticsearch_client_cat_methods_below_v8 = tuple(_elasticsearch_client_cat_methods_below_v8) + _elasticsearch_client_cat_methods_v8 = ( ("aliases", None), ("allocation", None), @@ -408,6 +483,8 @@ def instrument_elasticsearch_client_indices_v8(module): ("transforms", None), ) +_async_elasticsearch_client_cat_methods_v8 = tuple(_elasticsearch_client_cat_methods_v8) + def instrument_elasticsearch_client_cat(module): # The module path was remapped in v8 to match previous versions. @@ -417,10 +494,22 @@ def instrument_elasticsearch_client_cat(module): instrument_es_methods(module, "CatClient", _elasticsearch_client_cat_methods_below_v8, "cat") +def instrument_async_elasticsearch_client_cat(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods(module, "CatClient", _async_elasticsearch_client_cat_methods_below_v8, "cat") + + def instrument_elasticsearch_client_cat_v8(module): instrument_es_methods(module, "CatClient", _elasticsearch_client_cat_methods_v8, "cat") +def instrument_async_elasticsearch_client_cat_v8(module): + instrument_async_es_methods(module, "CatClient", _async_elasticsearch_client_cat_methods_v8, "cat") + + _elasticsearch_client_cluster_methods_below_v8 = ( ("get_settings", None), ("health", _extract_args_index), @@ -431,6 +520,7 @@ def instrument_elasticsearch_client_cat_v8(module): ("stats", None), ) +_async_elasticsearch_client_cluster_methods_below_v8 = tuple(_elasticsearch_client_cluster_methods_below_v8) _elasticsearch_client_cluster_methods_v8 = ( ("allocation_explain", _extract_args_allocation_explain_index), @@ -450,6 +540,8 @@ def instrument_elasticsearch_client_cat_v8(module): ("stats", None), ) +_async_elasticsearch_client_cluster_methods_v8 = tuple(_elasticsearch_client_cluster_methods_v8) + def instrument_elasticsearch_client_cluster(module): # The module path was remapped in v8 to match previous versions. @@ -459,16 +551,33 @@ def instrument_elasticsearch_client_cluster(module): instrument_es_methods(module, "ClusterClient", _elasticsearch_client_cluster_methods_below_v8, "cluster") +def instrument_async_elasticsearch_client_cluster(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods( + module, "ClusterClient", _async_elasticsearch_client_cluster_methods_below_v8, "cluster" + ) + + def instrument_elasticsearch_client_cluster_v8(module): instrument_es_methods(module, "ClusterClient", _elasticsearch_client_cluster_methods_v8, "cluster") +def instrument_async_elasticsearch_client_cluster_v8(module): + instrument_async_es_methods(module, "ClusterClient", _async_elasticsearch_client_cluster_methods_v8, "cluster") + + _elasticsearch_client_nodes_methods_below_v8 = ( ("hot_threads", None), ("info", None), ("shutdown", None), ("stats", None), ) + +_async_elasticsearch_client_nodes_methods_below_v8 = tuple(_elasticsearch_client_nodes_methods_below_v8) + _elasticsearch_client_nodes_methods_v8 = ( ("clear_repositories_metering_archive", None), ("get_repositories_metering_info", None), @@ -479,6 +588,8 @@ def instrument_elasticsearch_client_cluster_v8(module): ("usage", None), ) +_async_elasticsearch_client_nodes_methods_v8 = tuple(_elasticsearch_client_nodes_methods_v8) + def instrument_elasticsearch_client_nodes(module): # The module path was remapped in v8 to match previous versions. @@ -488,10 +599,22 @@ def instrument_elasticsearch_client_nodes(module): instrument_es_methods(module, "NodesClient", _elasticsearch_client_nodes_methods_below_v8, "nodes") +def instrument_async_elasticsearch_client_nodes(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods(module, "NodesClient", _async_elasticsearch_client_nodes_methods_below_v8, "nodes") + + def instrument_elasticsearch_client_nodes_v8(module): instrument_es_methods(module, "NodesClient", _elasticsearch_client_nodes_methods_v8, "nodes") +def instrument_async_elasticsearch_client_nodes_v8(module): + instrument_async_es_methods(module, "NodesClient", _async_elasticsearch_client_nodes_methods_v8, "nodes") + + _elasticsearch_client_snapshot_methods_below_v8 = ( ("create", None), ("create_repository", None), @@ -503,6 +626,9 @@ def instrument_elasticsearch_client_nodes_v8(module): ("status", None), ("verify_repository", None), ) + +_async_elasticsearch_client_snapshot_methods_below_v8 = tuple(_elasticsearch_client_snapshot_methods_below_v8) + _elasticsearch_client_snapshot_methods_v8 = ( ("cleanup_repository", None), ("clone", None), @@ -517,6 +643,8 @@ def instrument_elasticsearch_client_nodes_v8(module): ("verify_repository", None), ) +_async_elasticsearch_client_snapshot_methods_v8 = tuple(_elasticsearch_client_snapshot_methods_v8) + def instrument_elasticsearch_client_snapshot(module): # The module path was remapped in v8 to match previous versions. @@ -526,15 +654,27 @@ def instrument_elasticsearch_client_snapshot(module): instrument_es_methods(module, "SnapshotClient", _elasticsearch_client_snapshot_methods_below_v8, "snapshot") +def instrument_async_elasticsearch_client_snapshot(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods( + module, "SnapshotClient", _async_elasticsearch_client_snapshot_methods_below_v8, "snapshot" + ) + + def instrument_elasticsearch_client_snapshot_v8(module): instrument_es_methods(module, "SnapshotClient", _elasticsearch_client_snapshot_methods_v8, "snapshot") -_elasticsearch_client_tasks_methods = ( - ("list", None), - ("cancel", None), - ("get", None), -) +def instrument_async_elasticsearch_client_snapshot_v8(module): + instrument_async_es_methods(module, "SnapshotClient", _async_elasticsearch_client_snapshot_methods_v8, "snapshot") + + +_elasticsearch_client_tasks_methods = (("list", None), ("cancel", None), ("get", None)) + +_async_elasticsearch_client_tasks_methods = tuple(_elasticsearch_client_tasks_methods) def instrument_elasticsearch_client_tasks(module): @@ -545,10 +685,22 @@ def instrument_elasticsearch_client_tasks(module): instrument_es_methods(module, "TasksClient", _elasticsearch_client_tasks_methods, "tasks") +def instrument_async_elasticsearch_client_tasks(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods(module, "TasksClient", _async_elasticsearch_client_tasks_methods, "tasks") + + def instrument_elasticsearch_client_tasks_v8(module): instrument_es_methods(module, "TasksClient", _elasticsearch_client_tasks_methods, "tasks") +def instrument_async_elasticsearch_client_tasks_v8(module): + instrument_async_es_methods(module, "TasksClient", _async_elasticsearch_client_tasks_methods, "tasks") + + _elasticsearch_client_ingest_methods_below_v8 = ( ("get_pipeline", None), ("put_pipeline", None), @@ -556,6 +708,8 @@ def instrument_elasticsearch_client_tasks_v8(module): ("simulate", None), ) +_async_elasticsearch_client_ingest_methods_below_v8 = tuple(_elasticsearch_client_ingest_methods_below_v8) + _elasticsearch_client_ingest_methods_v8 = ( ("delete_pipeline", None), ("geo_ip_stats", None), @@ -565,6 +719,8 @@ def instrument_elasticsearch_client_tasks_v8(module): ("simulate", None), ) +_async_elasticsearch_client_ingest_methods_v8 = tuple(_elasticsearch_client_ingest_methods_v8) + def instrument_elasticsearch_client_ingest(module): # The module path was remapped in v8 to match previous versions. @@ -574,10 +730,24 @@ def instrument_elasticsearch_client_ingest(module): instrument_es_methods(module, "IngestClient", _elasticsearch_client_ingest_methods_below_v8, "ingest") +def instrument_async_elasticsearch_client_ingest(module): + # The module path was remapped in v8 to match previous versions. + # In order to avoid double wrapping we check the version before + # wrapping. 
+ if ES_VERSION < (8,): + instrument_async_es_methods( + module, "IngestClient", _async_elasticsearch_client_ingest_methods_below_v8, "ingest" + ) + + def instrument_elasticsearch_client_ingest_v8(module): instrument_es_methods(module, "IngestClient", _elasticsearch_client_ingest_methods_v8, "ingest") +def instrument_async_elasticsearch_client_ingest_v8(module): + instrument_async_es_methods(module, "IngestClient", _async_elasticsearch_client_ingest_methods_v8, "ingest") + + # # Instrumentation to get Datastore Instance Information # @@ -600,6 +770,10 @@ def instrument_elasticsearch_connection_base(module): wrap_function_wrapper(module, "Connection.__init__", _nr_Connection__init__wrapper) +def instrument_async_elasticsearch_connection_base(module): + wrap_function_wrapper(module, "AsyncConnection.__init__", _nr_Connection__init__wrapper) + + def BaseNode__init__wrapper(wrapped, instance, args, kwargs): result = wrapped(*args, **kwargs) instance._nr_host_port = (instance.host, str(instance.port)) @@ -611,6 +785,11 @@ def instrument_elastic_transport__node__base(module): wrap_function_wrapper(module, "BaseNode.__init__", BaseNode__init__wrapper) +def instrument_async_elastic_transport__node__base(module): + if hasattr(module, "BaseNode"): + wrap_function_wrapper(module, "BaseAsyncNode.__init__", BaseNode__init__wrapper) + + def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): """Read instance info from Connection and stash on Transaction.""" @@ -636,6 +815,31 @@ def _nr_get_connection_wrapper(wrapped, instance, args, kwargs): return conn +async def _nr_get_async_connection_wrapper(wrapped, instance, args, kwargs): + """Read instance info from async Connection and stash on Transaction.""" + + transaction = current_transaction() + + if transaction is None: + return await wrapped(*args, **kwargs) + + conn = await wrapped(*args, **kwargs) + + instance_info = (None, None, None) + try: + tracer_settings = transaction.settings.datastore_tracer + + if 
tracer_settings.instance_reporting.enabled: + host, port_path_or_id = conn._nr_host_port + instance_info = (host, port_path_or_id, None) + except Exception: + instance_info = ("unknown", "unknown", None) + + transaction._nr_datastore_instance_info = instance_info + + return conn + + def _nr_perform_request_wrapper(wrapped, instance, args, kwargs): """Read instance info from Connection and stash on Transaction.""" @@ -651,11 +855,35 @@ def _nr_perform_request_wrapper(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) +async def _nr_async_perform_request_wrapper(wrapped, instance, args, kwargs): + """Read instance info from Async Connection and stash on Transaction.""" + transaction = current_transaction() + + if transaction is None: + return await wrapped(*args, **kwargs) + + if not hasattr(instance.node_pool.get, "_nr_wrapped"): + instance.node_pool.get = function_wrapper(_nr_get_connection_wrapper)(instance.node_pool.get) + instance.node_pool.get._nr_wrapped = True + + return await wrapped(*args, **kwargs) + + def instrument_elasticsearch_transport(module): if hasattr(module, "Transport") and hasattr(module.Transport, "get_connection"): wrap_function_wrapper(module, "Transport.get_connection", _nr_get_connection_wrapper) +def instrument_async_elasticsearch_transport(module): + if hasattr(module, "AsyncTransport") and hasattr(module.AsyncTransport, "get_connection"): + wrap_function_wrapper(module, "AsyncTransport.get_connection", _nr_get_async_connection_wrapper) + + def instrument_elastic_transport__transport(module): if hasattr(module, "Transport") and hasattr(module.Transport, "perform_request"): wrap_function_wrapper(module, "Transport.perform_request", _nr_perform_request_wrapper) + + +def instrument_async_elastic_transport__transport(module): + if hasattr(module, "AsyncTransport") and hasattr(module.AsyncTransport, "perform_request"): + wrap_function_wrapper(module, "AsyncTransport.perform_request", _nr_async_perform_request_wrapper) diff 
--git a/tests/datastore_elasticsearch/conftest.py b/tests/datastore_elasticsearch/conftest.py index e70dde884..9fbbb1ff0 100644 --- a/tests/datastore_elasticsearch/conftest.py +++ b/tests/datastore_elasticsearch/conftest.py @@ -47,3 +47,10 @@ def client(): from elasticsearch import Elasticsearch return Elasticsearch(ES_URL) + + +@pytest.fixture(scope="session") +def async_client(): + from elasticsearch import AsyncElasticsearch + + return AsyncElasticsearch(ES_URL) diff --git a/tests/datastore_elasticsearch/test_async_connection.py b/tests/datastore_elasticsearch/test_async_connection.py new file mode 100644 index 000000000..c2c28707e --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_connection.py @@ -0,0 +1,63 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +try: + from elasticsearch._async.http_aiohttp import AsyncConnection +except ImportError: + from elastic_transport._models import NodeConfig + from elastic_transport._node._base_async import BaseAsyncNode as AsyncConnection + +from conftest import ES_VERSION, ES_SETTINGS + + +HOST = {"scheme": "http", "host": ES_SETTINGS["host"], "port": int(ES_SETTINGS["port"])} + +IS_V8 = ES_VERSION >= (8,) +SKIP_IF_V7 = pytest.mark.skipif(not IS_V8, reason="Skipping v8 tests.") +SKIP_IF_V8 = pytest.mark.skipif(IS_V8, reason="Skipping v7 tests.") + + +def test_connection_default(): + if IS_V8: + conn = AsyncConnection(NodeConfig(**HOST)) + else: + conn = AsyncConnection(**HOST) + + assert conn._nr_host_port == (ES_SETTINGS["host"], ES_SETTINGS["port"]) + + +@SKIP_IF_V7 +def test_connection_config(): + conn = AsyncConnection(NodeConfig(scheme="http", host="foo", port=8888)) + assert conn._nr_host_port == ("foo", "8888") + + +@SKIP_IF_V8 +def test_connection_host_arg(): + conn = AsyncConnection("the_host") + assert conn._nr_host_port == ("the_host", "9200") + + +@SKIP_IF_V8 +def test_connection_args(): + conn = AsyncConnection("the_host", 9999) + assert conn._nr_host_port == ("the_host", "9999") + + +@SKIP_IF_V8 +def test_connection_kwargs(): + conn = AsyncConnection(host="foo", port=8888) + assert conn._nr_host_port == ("foo", "8888") diff --git a/tests/datastore_elasticsearch/test_async_database_duration.py b/tests/datastore_elasticsearch/test_async_database_duration.py new file mode 100644 index 000000000..5e7df3f30 --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_database_duration.py @@ -0,0 +1,71 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sqlite3 +import pytest +from testing_support.validators.validate_database_duration import validate_database_duration + +from newrelic.api.background_task import background_task + +from conftest import ES_VERSION + + +async def _exercise_es_v7(es): + await es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + await es.index( + index="contacts", doc_type="person", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2 + ) + await es.index( + index="contacts", doc_type="person", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3 + ) + await es.indices.refresh("contacts") + + +async def _exercise_es_v8(es): + await es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + await es.index(index="contacts", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) + await es.index(index="contacts", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + await es.indices.refresh(index="contacts") + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + + +@pytest.mark.asyncio +@validate_database_duration() +@background_task() +async def test_elasticsearch_database_duration(async_client): + await _exercise_es(async_client) + + +@pytest.mark.asyncio +@validate_database_duration() +@background_task() +async def test_elasticsearch_and_sqlite_database_duration(async_client): + # Make Elasticsearch queries + + await _exercise_es(async_client) + + # Make sqlite 
queries + + conn = sqlite3.connect(":memory:") + cur = conn.cursor() + + cur.execute("CREATE TABLE people (name text, age int)") + cur.execute("INSERT INTO people VALUES ('Bob', 22)") + + conn.commit() + conn.close() diff --git a/tests/datastore_elasticsearch/test_async_elasticsearch.py b/tests/datastore_elasticsearch/test_async_elasticsearch.py new file mode 100644 index 000000000..3a6ce77f2 --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_elasticsearch.py @@ -0,0 +1,238 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import pytest +import elasticsearch._async.client as client +from testing_support.fixtures import override_application_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_errors import validate_transaction_errors +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics + +from newrelic.api.background_task import background_task + +from conftest import ES_VERSION, ES_SETTINGS + + +# Settings + +_enable_instance_settings = {"datastore_tracer.instance_reporting.enabled": True} +_disable_instance_settings = {"datastore_tracer.instance_reporting.enabled": False} + +# Metrics + +_base_scoped_metrics = [ + ("Datastore/statement/Elasticsearch/_all/cluster.health", 1), + ("Datastore/statement/Elasticsearch/_all/search", 2), + ("Datastore/statement/Elasticsearch/address/index", 2), + ("Datastore/statement/Elasticsearch/address/search", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 3), + ("Datastore/statement/Elasticsearch/contacts/indices.refresh", 1), + ("Datastore/statement/Elasticsearch/contacts/search", 2), + ("Datastore/statement/Elasticsearch/other/search", 2), +] + +_base_rollup_metrics = [ + ("Datastore/operation/Elasticsearch/cluster.health", 1), + ("Datastore/operation/Elasticsearch/index", 5), + ("Datastore/operation/Elasticsearch/indices.refresh", 1), + ("Datastore/operation/Elasticsearch/search", 7), + ("Datastore/statement/Elasticsearch/_all/cluster.health", 1), + ("Datastore/statement/Elasticsearch/_all/search", 2), + ("Datastore/statement/Elasticsearch/address/index", 2), + ("Datastore/statement/Elasticsearch/address/search", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 3), + ("Datastore/statement/Elasticsearch/contacts/indices.refresh", 1), + ("Datastore/statement/Elasticsearch/contacts/search", 2), + 
("Datastore/statement/Elasticsearch/other/search", 2), +] + +# Version support + + +def is_importable(module_path): + try: + __import__(module_path) + return True + except ImportError: + return False + + +_all_count = 17 + +if is_importable("elasticsearch._async.client.cat"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/cat.health", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/cat.health", 1)) + _all_count += 1 +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/cat.health", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/cat.health", None)) + +if is_importable("elasticsearch._async.client.nodes"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", 1)) + _all_count += 1 +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/nodes.info", None)) + +if hasattr(client, "SnapshotClient") and hasattr(client.SnapshotClient, "status"): + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", 1)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", 1)) + _all_count += 1 +else: + _base_scoped_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", None)) + _base_rollup_metrics.append(("Datastore/operation/Elasticsearch/snapshot.status", None)) + +if hasattr(client.IndicesClient, "status"): + _base_scoped_metrics.append(("Datastore/statement/Elasticsearch/_all/indices.status", 1)) + _base_rollup_metrics.extend( + [ + ("Datastore/operation/Elasticsearch/indices.status", 1), + ("Datastore/statement/Elasticsearch/_all/indices.status", 1), + ] + ) + _all_count += 1 +else: + 
_base_scoped_metrics.append(("Datastore/operation/Elasticsearch/indices.status", None)) + _base_rollup_metrics.extend( + [ + ("Datastore/operation/Elasticsearch/indices.status", None), + ("Datastore/statement/Elasticsearch/_all/indices.status", None), + ] + ) + +_base_rollup_metrics.extend( + [ + ("Datastore/all", _all_count), + ("Datastore/allOther", _all_count), + ("Datastore/Elasticsearch/all", _all_count), + ("Datastore/Elasticsearch/allOther", _all_count), + ] +) + +# Instance info + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +_host = instance_hostname(ES_SETTINGS["host"]) +_port = ES_SETTINGS["port"] + +_instance_metric_name = f"Datastore/instance/Elasticsearch/{_host}/{_port}" + +_enable_rollup_metrics.append((_instance_metric_name, _all_count)) + +_disable_rollup_metrics.append((_instance_metric_name, None)) + +# Query + + +async def _exercise_es_v7(es): + await es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + await es.index( + index="contacts", doc_type="person", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2 + ) + await es.index( + index="contacts", doc_type="person", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3 + ) + await es.indices.refresh("contacts") + await es.index( + index="address", doc_type="employee", body={"name": "Sherlock", "address": "221B Baker Street, London"}, id=1 + ) + await es.index( + index="address", + doc_type="employee", + body={"name": "Bilbo", "address": "Bag End, Bagshot row, Hobbiton, Shire"}, + id=2, + ) + await es.search(index="contacts", q="name:Joe") + await es.search(index="contacts", q="name:jessica") + await es.search(index="address", q="name:Sherlock") + await es.search(index=["contacts", "address"], q="name:Bilbo") + await 
es.search(index="contacts,address", q="name:Bilbo") + await es.search(index="*", q="name:Bilbo") + await es.search(q="name:Bilbo") + await es.cluster.health() + + if hasattr(es, "cat"): + await es.cat.health() + if hasattr(es, "nodes"): + await es.nodes.info() + if hasattr(es, "snapshot") and hasattr(es.snapshot, "status"): + await es.snapshot.status() + if hasattr(es.indices, "status"): + await es.indices.status() + + +async def _exercise_es_v8(es): + await es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + await es.index(index="contacts", body={"name": "Jessica Coder", "age": 32, "title": "Programmer"}, id=2) + await es.index(index="contacts", body={"name": "Freddy Tester", "age": 29, "title": "Assistant"}, id=3) + await es.indices.refresh(index="contacts") + await es.index(index="address", body={"name": "Sherlock", "address": "221B Baker Street, London"}, id=1) + await es.index(index="address", body={"name": "Bilbo", "address": "Bag End, Bagshot row, Hobbiton, Shire"}, id=2) + await es.search(index="contacts", q="name:Joe") + await es.search(index="contacts", q="name:jessica") + await es.search(index="address", q="name:Sherlock") + await es.search(index=["contacts", "address"], q="name:Bilbo") + await es.search(index="contacts,address", q="name:Bilbo") + await es.search(index="*", q="name:Bilbo") + await es.search(q="name:Bilbo") + await es.cluster.health() + + if hasattr(es, "cat"): + await es.cat.health() + if hasattr(es, "nodes"): + await es.nodes.info() + if hasattr(es, "snapshot") and hasattr(es.snapshot, "status"): + await es.snapshot.status() + if hasattr(es.indices, "status"): + await es.indices.status() + + +_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8 + + +# Test + + +@validate_transaction_errors(errors=[]) +@validate_transaction_metrics( + "test_async_elasticsearch:test_async_elasticsearch_operation_disabled", + scoped_metrics=_disable_scoped_metrics, + 
rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@override_application_settings(_disable_instance_settings) +@background_task() +def test_async_elasticsearch_operation_disabled(async_client, loop): + loop.run_until_complete(_exercise_es(async_client)) + + +@validate_transaction_errors(errors=[]) +@validate_transaction_metrics( + "test_async_elasticsearch:test_async_elasticsearch_operation_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@override_application_settings(_enable_instance_settings) +@background_task() +def test_async_elasticsearch_operation_enabled(async_client, loop): + loop.run_until_complete(_exercise_es(async_client)) diff --git a/tests/datastore_elasticsearch/test_async_instrumented_methods.py b/tests/datastore_elasticsearch/test_async_instrumented_methods.py new file mode 100644 index 000000000..dc8bfed07 --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_instrumented_methods.py @@ -0,0 +1,131 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import elasticsearch +import elasticsearch._async.client as async_client +import pytest +from conftest import ES_VERSION +from testing_support.validators.validate_datastore_trace_inputs import validate_datastore_trace_inputs + +from newrelic.api.background_task import background_task + +RUN_IF_V8 = pytest.mark.skipif( + ES_VERSION < (8,), reason="Only run for v8+. 
We don't support all methods in previous versions." +) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "sub_module,method,args,kwargs,expected_index", + [ + (None, "exists", (), {"index": "contacts", "id": 1}, "contacts"), + (None, "info", (), {}, None), + pytest.param( + None, + "msearch", + (), + {"searches": [{}, {"query": {"match": {"message": "this is a test"}}}], "index": "contacts"}, + "contacts", + marks=RUN_IF_V8, + ), + ("indices", "exists", (), {"index": "contacts"}, "contacts"), + ("indices", "exists_template", (), {"name": "no-exist"}, None), + ("cat", "count", (), {"index": "contacts"}, "contacts"), + ("cat", "health", (), {}, None), + pytest.param( + "cluster", + "allocation_explain", + (), + {"index": "contacts", "shard": 0, "primary": True}, + "contacts", + marks=RUN_IF_V8, + ), + ("cluster", "get_settings", (), {}, None), + ("cluster", "health", (), {"index": "contacts"}, "contacts"), + ("nodes", "info", (), {}, None), + ("snapshot", "status", (), {}, None), + ("tasks", "list", (), {}, None), + ("ingest", "geo_ip_stats", (), {}, None), + ], +) +async def test_method_on_async_client_datastore_trace_inputs( + async_client, sub_module, method, args, kwargs, expected_index +): + expected_operation = f"{sub_module}.{method}" if sub_module else method + + @validate_datastore_trace_inputs(target=expected_index, operation=expected_operation) + @background_task() + async def _test(): + if not sub_module: + await getattr(async_client, method)(*args, **kwargs) + else: + await getattr(getattr(async_client, sub_module), method)(*args, **kwargs) + + await _test() + + +def _test_methods_wrapped(_object, ignored_methods=None): + if not ignored_methods: + ignored_methods = {"perform_request", "transport"} + + def is_wrapped(m): + return hasattr(getattr(_object, m), "__wrapped__") + + methods = {m for m in dir(_object) if not m[0] == "_"} + uninstrumented = {m for m in (methods - ignored_methods) if not is_wrapped(m)} + assert not uninstrumented, f"There are 
uninstrumented methods: {uninstrumented}" + + +@RUN_IF_V8 +def test_async_instrumented_methods_client(): + _test_methods_wrapped(elasticsearch.AsyncElasticsearch) + + +@RUN_IF_V8 +def test_instrumented_methods_client_indices(): + _test_methods_wrapped(async_client.IndicesClient) + + +@RUN_IF_V8 +def test_instrumented_methods_client_cluster(): + _test_methods_wrapped(async_client.ClusterClient) + + +@RUN_IF_V8 +def test_instrumented_methods_client_cat(): + if hasattr(async_client, "CatClient"): + _test_methods_wrapped(async_client.CatClient) + + +@RUN_IF_V8 +def test_instrumented_methods_client_nodes(): + if hasattr(async_client, "NodesClient"): + _test_methods_wrapped(async_client.NodesClient) + + +@RUN_IF_V8 +def test_instrumented_methods_client_snapshot(): + if hasattr(async_client, "SnapshotClient"): + _test_methods_wrapped(async_client.SnapshotClient) + + +@RUN_IF_V8 +def test_instrumented_methods_client_tasks(): + if hasattr(async_client, "TasksClient"): + _test_methods_wrapped(async_client.TasksClient) + + +@RUN_IF_V8 +def test_instrumented_methods_client_ingest(): + if hasattr(async_client, "IngestClient"): + _test_methods_wrapped(async_client.IngestClient) diff --git a/tests/datastore_elasticsearch/test_async_mget.py b/tests/datastore_elasticsearch/test_async_mget.py new file mode 100644 index 000000000..d8cfc7b11 --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_mget.py @@ -0,0 +1,143 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +try: + from elastic_transport import RoundRobinSelector + from elasticsearch import AsyncElasticsearch +except ImportError: + from elasticsearch.connection_pool import RoundRobinSelector + from elasticsearch._async.client import AsyncElasticsearch + +from conftest import ES_MULTIPLE_SETTINGS, ES_VERSION +from testing_support.fixtures import override_application_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics + +from newrelic.api.background_task import background_task + +# Settings + +_enable_instance_settings = {"datastore_tracer.instance_reporting.enabled": True} +_disable_instance_settings = {"datastore_tracer.instance_reporting.enabled": False} + +# Metrics + +_base_scoped_metrics = (("Datastore/statement/Elasticsearch/contacts/index", 2),) + +_base_rollup_metrics = ( + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Elasticsearch/all", 3), + ("Datastore/Elasticsearch/allOther", 3), + ("Datastore/operation/Elasticsearch/index", 2), + ("Datastore/operation/Elasticsearch/mget", 1), + ("Datastore/statement/Elasticsearch/contacts/index", 2), +) + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +if len(ES_MULTIPLE_SETTINGS) > 1: + es_1 = ES_MULTIPLE_SETTINGS[0] + es_2 = ES_MULTIPLE_SETTINGS[1] + + host_1 = instance_hostname(es_1["host"]) + port_1 = es_1["port"] + + host_2 = instance_hostname(es_2["host"]) + port_2 = es_2["port"] + + instance_metric_name_1 = f"Datastore/instance/Elasticsearch/{host_1}/{port_1}" + instance_metric_name_2 = 
f"Datastore/instance/Elasticsearch/{host_2}/{port_2}" + + _enable_rollup_metrics.extend([(instance_metric_name_1, 2), (instance_metric_name_2, 1)]) + + _disable_rollup_metrics.extend([(instance_metric_name_1, None), (instance_metric_name_2, None)]) + + +@pytest.fixture(scope="module") +def client(): + urls = [f"http://{db['host']}:{db['port']}" for db in ES_MULTIPLE_SETTINGS] + # When selecting a connection from the pool, use the round robin method. + # This is actually the default already. Using round robin will ensure that + # doing two db calls will mean elastic search is talking to two different + # dbs. + if ES_VERSION >= (8,): + client = AsyncElasticsearch(urls, node_selector_class=RoundRobinSelector, randomize_hosts=False) + else: + client = AsyncElasticsearch(urls, selector_class=RoundRobinSelector, randomize_hosts=False) + return client + + +# Query + + +async def _exercise_es_multi(es): + # set on db 1 + if ES_VERSION >= (8,): + await es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1) + # set on db 2 + await es.index(index="contacts", body={"name": "Jane Tester", "age": 22, "title": "Senior QA Engineer"}, id=2) + else: + await es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + # set on db 2 + await es.index( + index="contacts", + doc_type="person", + body={"name": "Jane Tester", "age": 22, "title": "Senior QA Engineer"}, + id=2, + ) + + # ask db 1, will return info from db 1 and 2 + mget_body = {"docs": [{"_id": 1, "_index": "contacts"}, {"_id": 2, "_index": "contacts"}]} + + results = await es.mget(body=mget_body) + assert len(results["docs"]) == 2 + + +# Test + + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_async_mget:test_async_multi_get_enabled", + 
scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_async_multi_get_enabled(client, loop): + loop.run_until_complete(_exercise_es_multi(client)) + + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_async_mget:test_async_multi_get_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_async_multi_get_disabled(client, loop): + loop.run_until_complete(_exercise_es_multi(client)) diff --git a/tests/datastore_elasticsearch/test_async_multiple_dbs.py b/tests/datastore_elasticsearch/test_async_multiple_dbs.py new file mode 100644 index 000000000..fc2a13265 --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_multiple_dbs.py @@ -0,0 +1,110 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from conftest import ES_MULTIPLE_SETTINGS, ES_VERSION +from elasticsearch import AsyncElasticsearch +from testing_support.fixtures import override_application_settings +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics + +from newrelic.api.background_task import background_task + +# Settings + +_enable_instance_settings = {"datastore_tracer.instance_reporting.enabled": True} +_disable_instance_settings = {"datastore_tracer.instance_reporting.enabled": False} + +# Metrics + +_base_scoped_metrics = (("Datastore/statement/Elasticsearch/contacts/index", 2),) + +_base_rollup_metrics = ( + ("Datastore/all", 2), + ("Datastore/allOther", 2), + ("Datastore/Elasticsearch/all", 2), + ("Datastore/Elasticsearch/allOther", 2), + ("Datastore/operation/Elasticsearch/index", 2), + ("Datastore/statement/Elasticsearch/contacts/index", 2), +) + +_disable_scoped_metrics = list(_base_scoped_metrics) +_disable_rollup_metrics = list(_base_rollup_metrics) + +_enable_scoped_metrics = list(_base_scoped_metrics) +_enable_rollup_metrics = list(_base_rollup_metrics) + +if len(ES_MULTIPLE_SETTINGS) > 1: + es_1 = ES_MULTIPLE_SETTINGS[0] + es_2 = ES_MULTIPLE_SETTINGS[1] + + host_1 = instance_hostname(es_1["host"]) + port_1 = es_1["port"] + + host_2 = instance_hostname(es_2["host"]) + port_2 = es_2["port"] + + instance_metric_name_1 = f"Datastore/instance/Elasticsearch/{host_1}/{port_1}" + instance_metric_name_2 = f"Datastore/instance/Elasticsearch/{host_2}/{port_2}" + + _enable_rollup_metrics.extend([(instance_metric_name_1, 1), (instance_metric_name_2, 1)]) + + _disable_rollup_metrics.extend([(instance_metric_name_1, None), (instance_metric_name_2, None)]) + +# Query + + +async def _exercise_es(es): + if ES_VERSION >= (8,): + await es.index(index="contacts", body={"name": "Joe Tester", "age": 25, 
"title": "QA Engineer"}, id=1) + else: + await es.index( + index="contacts", doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Engineer"}, id=1 + ) + + +# Test + + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") +@override_application_settings(_enable_instance_settings) +@validate_transaction_metrics( + "test_async_multiple_dbs:test_async_multiple_dbs_enabled", + scoped_metrics=_enable_scoped_metrics, + rollup_metrics=_enable_rollup_metrics, + background_task=True, +) +@background_task() +def test_async_multiple_dbs_enabled(loop): + for db in ES_MULTIPLE_SETTINGS: + es_url = f"http://{db['host']}:{db['port']}" + client = AsyncElasticsearch(es_url) + loop.run_until_complete(_exercise_es(client)) + + +@pytest.mark.skipif(len(ES_MULTIPLE_SETTINGS) < 2, reason="Test environment not configured with multiple databases.") +@override_application_settings(_disable_instance_settings) +@validate_transaction_metrics( + "test_async_multiple_dbs:test_async_multiple_dbs_disabled", + scoped_metrics=_disable_scoped_metrics, + rollup_metrics=_disable_rollup_metrics, + background_task=True, +) +@background_task() +def test_async_multiple_dbs_disabled(loop): + for db in ES_MULTIPLE_SETTINGS: + es_url = f"http://{db['host']}:{db['port']}" + client = AsyncElasticsearch(es_url) + loop.run_until_complete(_exercise_es(client)) diff --git a/tests/datastore_elasticsearch/test_async_trace_node.py b/tests/datastore_elasticsearch/test_async_trace_node.py new file mode 100644 index 000000000..3fba90cba --- /dev/null +++ b/tests/datastore_elasticsearch/test_async_trace_node.py @@ -0,0 +1,92 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from testing_support.fixtures import override_application_settings, validate_tt_parenting +from testing_support.fixture.event_loop import event_loop as loop # noqa: F401 +from testing_support.util import instance_hostname +from testing_support.validators.validate_tt_collector_json import validate_tt_collector_json + +from newrelic.api.background_task import background_task + +from conftest import ES_SETTINGS, ES_VERSION + +# Settings + +_enable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": True, +} +_disable_instance_settings = { + "datastore_tracer.instance_reporting.enabled": False, + "datastore_tracer.database_name_reporting.enabled": False, +} +_instance_only_settings = { + "datastore_tracer.instance_reporting.enabled": True, + "datastore_tracer.database_name_reporting.enabled": False, +} + +# Expected parameters + +_enabled_required = {"host": instance_hostname(ES_SETTINGS["host"]), "port_path_or_id": str(ES_SETTINGS["port"])} +_enabled_forgone = {"db.instance": "VALUE NOT USED"} + +_disabled_required = {} +_disabled_forgone = {"host": "VALUE NOT USED", "port_path_or_id": "VALUE NOT USED", "db.instance": "VALUE NOT USED"} + +_instance_only_required = {"host": instance_hostname(ES_SETTINGS["host"]), "port_path_or_id": str(ES_SETTINGS["port"])} +_instance_only_forgone = {"db.instance": "VALUE NOT USED"} + +_tt_parenting = ("TransactionNode", [("DatastoreNode", [])]) + + +# Query + + +async def _exercise_es_v7(es): + await es.index( + index="contacts", 
doc_type="person", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1
+    )
+
+
+async def _exercise_es_v8(es):
+    await es.index(index="contacts", body={"name": "Joe Tester", "age": 25, "title": "QA Master"}, id=1)
+
+
+_exercise_es = _exercise_es_v7 if ES_VERSION < (8, 0, 0) else _exercise_es_v8
+
+# Tests
+
+
+@override_application_settings(_enable_instance_settings)
+@validate_tt_collector_json(datastore_params=_enabled_required, datastore_forgone_params=_enabled_forgone)
+@validate_tt_parenting(_tt_parenting)
+@background_task()
+def test_async_trace_node_datastore_params_enable_instance(async_client, loop):
+    loop.run_until_complete(_exercise_es(async_client))
+
+
+@override_application_settings(_disable_instance_settings)
+@validate_tt_collector_json(datastore_params=_disabled_required, datastore_forgone_params=_disabled_forgone)
+@validate_tt_parenting(_tt_parenting)
+@background_task()
+def test_async_trace_node_datastore_params_disable_instance(async_client, loop):
+    loop.run_until_complete(_exercise_es(async_client))
+
+
+@override_application_settings(_instance_only_settings)
+@validate_tt_collector_json(datastore_params=_instance_only_required, datastore_forgone_params=_instance_only_forgone)
+@validate_tt_parenting(_tt_parenting)
+@background_task()
+def test_async_trace_node_datastore_params_instance_only(async_client, loop):
+    loop.run_until_complete(_exercise_es(async_client))
diff --git a/tests/datastore_elasticsearch/test_async_transport.py b/tests/datastore_elasticsearch/test_async_transport.py
new file mode 100644
index 000000000..a90e85404
--- /dev/null
+++ b/tests/datastore_elasticsearch/test_async_transport.py
@@ -0,0 +1,84 @@
+# Copyright 2010 New Relic, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+from conftest import ES_SETTINGS, ES_VERSION
+from elasticsearch.serializer import JSONSerializer
+
+from newrelic.api.background_task import background_task
+from newrelic.api.transaction import current_transaction
+
+try:
+    from elasticsearch.connection.http_aiohttp import AiohttpHttpConnection
+    from elasticsearch.connection.http_httpx import HttpxHttpConnection
+    from elasticsearch.transport import Transport, AsyncTransport
+
+    NodeConfig = dict
+except ImportError:
+    from elastic_transport._models import NodeConfig
+    from elastic_transport._node._http_aiohttp import AiohttpHttpNode as AiohttpHttpConnection
+    from elastic_transport._node._http_httpx import HttpxAsyncHttpNode as HttpxHttpConnection
+    from elastic_transport._async_transport import AsyncTransport
+
+
+IS_V8 = ES_VERSION >= (8,)
+IS_V7 = ES_VERSION >= (7,) and ES_VERSION < (8, 0)
+IS_BELOW_V7 = ES_VERSION < (7,)
+
+RUN_IF_V8 = pytest.mark.skipif(IS_V7 or IS_BELOW_V7, reason="Only run for v8+")
+RUN_IF_V7 = pytest.mark.skipif(IS_V8 or IS_BELOW_V7, reason="Only run for v7")
+RUN_IF_BELOW_V7 = pytest.mark.skipif(not IS_BELOW_V7, reason="Only run for versions below v7")
+
+
+HOST = NodeConfig(scheme="http", host=ES_SETTINGS["host"], port=int(ES_SETTINGS["port"]))
+
+METHOD = "/contacts/person/1"
+HEADERS = {"Content-Type": "application/json"}
+DATA = {"name": "Joe Tester"}
+
+BODY = JSONSerializer().dumps(DATA)
+if hasattr(BODY, "encode"):
+    BODY = BODY.encode("utf-8")
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "transport_kwargs, perform_request_kwargs",
+    [
+        pytest.param(
+            
{"node_class": AiohttpHttpConnection}, + {"headers": HEADERS, "body": DATA}, + id="AiohttpHttpConnectionV8", + marks=RUN_IF_V8, + ), + pytest.param( + {"node_class": HttpxHttpConnection}, + {"headers": HEADERS, "body": DATA}, + id="HttpxHttpConnectionV8", + marks=RUN_IF_V8, + ), + pytest.param( + {"node_class": AiohttpHttpConnection}, {"body": DATA}, id="AiohttpHttpConnectionV7", marks=RUN_IF_V7 + ), + ], +) +@background_task() +async def test_async_transport_connection_classes(transport_kwargs, perform_request_kwargs): + transaction = current_transaction() + + transport = AsyncTransport([HOST], **transport_kwargs) + await transport.perform_request("POST", METHOD, **perform_request_kwargs) + + expected = (ES_SETTINGS["host"], ES_SETTINGS["port"], None) + assert transaction._nr_datastore_instance_info == expected diff --git a/tox.ini b/tox.ini index 3ab16a41a..e7b304bb6 100644 --- a/tox.ini +++ b/tox.ini @@ -260,6 +260,7 @@ deps = datastore_cassandradriver-cassandralatest: cassandra-driver datastore_cassandradriver-cassandralatest: twisted datastore_elasticsearch: requests + datastore_elasticsearch: httpx datastore_elasticsearch-elasticsearch07: elasticsearch<8.0 datastore_elasticsearch-elasticsearch08: elasticsearch<9.0 datastore_firestore: google-cloud-firestore