diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index bc4a5324a..3f892b407 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -5,18 +5,16 @@ FROM ghcr.io/newrelic/newrelic-python-agent-ci:${IMAGE_TAG} # Setup non-root user USER root ARG UID=1000 -ARG GID=$UID +ARG GID=${UID} ENV HOME /home/vscode RUN mkdir -p ${HOME} && \ groupadd --gid ${GID} vscode && \ useradd --uid ${UID} --gid ${GID} --home ${HOME} vscode && \ chown -R ${UID}:${GID} /home/vscode -# Move pyenv installation -ENV PYENV_ROOT="${HOME}/.pyenv" -ENV PATH="$PYENV_ROOT/bin:$PYENV_ROOT/shims:${PATH}" -RUN mv /root/.pyenv /home/vscode/.pyenv && \ - chown -R vscode:vscode /home/vscode/.pyenv +# Fix pyenv installation +RUN echo 'eval "$(pyenv init -)"' >>${HOME}/.bashrc && \ + chown -R vscode:vscode ${PYENV_ROOT} # Set user USER ${UID}:${GID} diff --git a/.github/actions/update-rpm-config/action.yml b/.github/actions/update-rpm-config/action.yml index 9d19ebba0..01a367c51 100644 --- a/.github/actions/update-rpm-config/action.yml +++ b/.github/actions/update-rpm-config/action.yml @@ -5,10 +5,6 @@ inputs: description: "Language agent to configure (eg. python)" required: true default: "python" - target-system: - description: "Target System: prod|staging|all" - required: true - default: "all" agent-version: description: "3-4 digit agent version number (eg. 
1.2.3) with optional leading v (ignored)" required: true @@ -19,9 +15,6 @@ inputs: production-api-key: description: "API key for New Relic Production" required: false - staging-api-key: - description: "API key for New Relic Staging" - required: false runs: using: "composite" @@ -42,64 +35,31 @@ runs: run: | echo "CONTENT_TYPE='Content-Type: application/json'" >> $GITHUB_ENV - - name: Update Staging system configuration page + - name: Update system configuration page shell: bash - if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'staging' || inputs.target-system == 'all') }} - run: | - curl -X POST 'https://staging-api.newrelic.com/v2/system_configuration.json' \ - -H "X-Api-Key:${{ inputs.staging-api-key }}" -i \ - -H ${{ env.CONTENT_TYPE }} \ - -d ${{ env.PAYLOAD }} - - - name: Update Production system configuration page - shell: bash - if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'prod' || inputs.target-system == 'all') }} + if: ${{ inputs.dry-run == 'false' }} run: | curl -X POST 'https://api.newrelic.com/v2/system_configuration.json' \ -H "X-Api-Key:${{ inputs.production-api-key }}" -i \ -H ${{ env.CONTENT_TYPE }} \ -d ${{ env.PAYLOAD }} - - name: Verify Staging system configuration update - shell: bash - if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'staging' || inputs.target-system == 'all') }} - run: | - STAGING_VERSION=$(curl -X GET 'https://staging-api.newrelic.com/v2/system_configuration.json' \ - -H "X-Api-Key:${{ inputs.staging-api-key }}" \ - -H "${{ env.CONTENT_TYPE }}" | jq ".system_configurations | from_entries | .${{inputs.agent-language}}_agent_version") - - if [ "${{ env.AGENT_VERSION }}" != "$STAGING_VERSION" ]; then - echo "Staging version mismatch: $STAGING_VERSION" - exit 1 - fi - - - name: Verify Production system configuration update + - name: Verify system configuration update shell: bash - if: ${{ inputs.dry-run == 'false' && (inputs.target-system == 'prod' || inputs.target-system == 'all') }} 
+ if: ${{ inputs.dry-run == 'false' }} run: | PROD_VERSION=$(curl -X GET 'https://api.newrelic.com/v2/system_configuration.json' \ -H "X-Api-Key:${{ inputs.production-api-key }}" \ -H "${{ env.CONTENT_TYPE }}" | jq ".system_configurations | from_entries | .${{inputs.agent-language}}_agent_version") if [ "${{ env.AGENT_VERSION }}" != "$PROD_VERSION" ]; then - echo "Production version mismatch: $PROD_VERSION" + echo "Version mismatch: $PROD_VERSION" exit 1 fi - - name: (dry-run) Update Staging system configuration page - shell: bash - if: ${{ inputs.dry-run != 'false' && (inputs.target-system == 'staging' || inputs.target-system == 'all') }} - run: | - cat << EOF - curl -X POST 'https://staging-api.newrelic.com/v2/system_configuration.json' \ - -H "X-Api-Key:**REDACTED**" -i \ - -H ${{ env.CONTENT_TYPE }} \ - -d ${{ env.PAYLOAD }} - EOF - - - name: (dry-run) Update Production system configuration page + - name: (dry-run) Update system configuration page shell: bash - if: ${{ inputs.dry-run != 'false' && (inputs.target-system == 'prod' || inputs.target-system == 'all') }} + if: ${{ inputs.dry-run != 'false' }} run: | cat << EOF curl -X POST 'https://api.newrelic.com/v2/system_configuration.json' \ diff --git a/.github/containers/Dockerfile b/.github/containers/Dockerfile index 57d8c234c..d2d8e9024 100644 --- a/.github/containers/Dockerfile +++ b/.github/containers/Dockerfile @@ -89,10 +89,10 @@ ENV HOME /root WORKDIR "${HOME}" # Install pyenv -ENV PYENV_ROOT="${HOME}/.pyenv" +ENV PYENV_ROOT="/usr/local/pyenv" RUN curl https://pyenv.run/ | /bin/bash -ENV PATH="$PYENV_ROOT/bin:$PYENV_ROOT/shims:${PATH}" -RUN echo 'eval "$(pyenv init -)"' >>$HOME/.bashrc && \ +ENV PATH="${PYENV_ROOT}/bin:${PYENV_ROOT}/shims:${PATH}" +RUN echo 'eval "$(pyenv init -)"' >>${HOME}/.bashrc && \ pyenv update # Install Python diff --git a/.github/workflows/deploy-python.yml b/.github/workflows/deploy-python.yml index ca908b825..6ad06398f 100644 --- a/.github/workflows/deploy-python.yml +++ 
b/.github/workflows/deploy-python.yml @@ -88,5 +88,5 @@ jobs: target-system: "all" agent-version: "${{ github.ref_name }}" dry-run: "false" - production-api-key: ${{ secrets.NEW_RELIC_API_KEY_PRODUCTION }}" - staging-api-key: ${{ secrets.NEW_RELIC_API_KEY_STAGING }}" + production-api-key: ${{ secrets.NEW_RELIC_API_KEY_PRODUCTION }} + staging-api-key: ${{ secrets.NEW_RELIC_API_KEY_STAGING }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 402d0c629..b44aa8e84 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -36,7 +36,6 @@ jobs: - python - elasticsearchserver07 - elasticsearchserver08 - - gearman - grpc - kafka - memcached @@ -967,63 +966,6 @@ jobs: path: ./**/.coverage.* retention-days: 1 - gearman: - env: - TOTAL_GROUPS: 1 - - strategy: - fail-fast: false - matrix: - group-number: [1] - - runs-on: ubuntu-20.04 - container: - image: ghcr.io/newrelic/newrelic-python-agent-ci:latest - options: >- - --add-host=host.docker.internal:host-gateway - timeout-minutes: 30 - - services: - gearman: - image: artefactual/gearmand - ports: - - 8080:4730 - # Set health checks to wait until gearman has started - options: >- - --health-cmd "(echo status ; sleep 0.1) | nc 127.0.0.1 4730 -w 1" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - - steps: - - uses: actions/checkout@v3 - - - name: Fetch git tags - run: | - git config --global --add safe.directory "$GITHUB_WORKSPACE" - git fetch --tags origin - - - name: Get Environments - id: get-envs - run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> $GITHUB_OUTPUT - env: - GROUP_NUMBER: ${{ matrix.group-number }} - - - name: Test - run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto - env: - TOX_PARALLEL_NO_SPINNER: 1 - PY_COLORS: 0 - - - name: Upload Coverage Artifacts - uses: actions/upload-artifact@v3 - with: - name: coverage-${{ github.job }}-${{ strategy.job-index }} - path: ./**/.coverage.* - 
retention-days: 1 - firestore: env: TOTAL_GROUPS: 1 diff --git a/newrelic/admin/__init__.py b/newrelic/admin/__init__.py index e41599a31..509037dd5 100644 --- a/newrelic/admin/__init__.py +++ b/newrelic/admin/__init__.py @@ -14,27 +14,26 @@ from __future__ import print_function -import sys import logging +import sys _builtin_plugins = [ - 'debug_console', - 'generate_config', - 'license_key', - 'local_config', - 'network_config', - 'record_deploy', - 'run_program', - 'run_python', - 'server_config', - 'validate_config' + "debug_console", + "generate_config", + "license_key", + "local_config", + "network_config", + "record_deploy", + "run_program", + "run_python", + "server_config", + "validate_config", ] _commands = {} -def command(name, options='', description='', hidden=False, - log_intercept=True, deprecated=False): +def command(name, options="", description="", hidden=False, log_intercept=True, deprecated=False): def wrapper(callback): callback.name = name callback.options = options @@ -44,6 +43,7 @@ def wrapper(callback): callback.deprecated = deprecated _commands[name] = callback return callback + return wrapper @@ -51,15 +51,15 @@ def usage(name): details = _commands[name] if details.deprecated: print("[WARNING] This command is deprecated and will be removed") - print('Usage: newrelic-admin %s %s' % (name, details.options)) + print("Usage: newrelic-admin %s %s" % (name, details.options)) -@command('help', '[command]', hidden=True) +@command("help", "[command]", hidden=True) def help(args): if not args: - print('Usage: newrelic-admin command [options]') + print("Usage: newrelic-admin command [options]") print() - print("Type 'newrelic-admin help '", end='') + print("Type 'newrelic-admin help '", end="") print("for help on a specific command.") print() print("Available commands are:") @@ -68,24 +68,24 @@ def help(args): for name in commands: details = _commands[name] if not details.hidden: - print(' ', name) + print(" ", name) else: name = args[0] if name not 
in _commands: - print("Unknown command '%s'." % name, end=' ') + print("Unknown command '%s'." % name, end=" ") print("Type 'newrelic-admin help' for usage.") else: details = _commands[name] - print('Usage: newrelic-admin %s %s' % (name, details.options)) + print("Usage: newrelic-admin %s %s" % (name, details.options)) if details.description: print() description = details.description if details.deprecated: - description = '[DEPRECATED] ' + description + description = "[DEPRECATED] " + description print(description) @@ -99,7 +99,7 @@ def emit(self, record): if len(logging.root.handlers) != 0: return - if record.name.startswith('newrelic.packages'): + if record.name.startswith("newrelic.packages"): return if record.levelno < logging.WARNING: @@ -107,9 +107,9 @@ def emit(self, record): return logging.StreamHandler.emit(self, record) - _stdout_logger = logging.getLogger('newrelic') + _stdout_logger = logging.getLogger("newrelic") _stdout_handler = FilteredStreamHandler(sys.stdout) - _stdout_format = '%(levelname)s - %(message)s\n' + _stdout_format = "%(levelname)s - %(message)s\n" _stdout_formatter = logging.Formatter(_stdout_format) _stdout_handler.setFormatter(_stdout_formatter) _stdout_logger.addHandler(_stdout_handler) @@ -117,19 +117,27 @@ def emit(self, record): def load_internal_plugins(): for name in _builtin_plugins: - module_name = '%s.%s' % (__name__, name) + module_name = "%s.%s" % (__name__, name) __import__(module_name) def load_external_plugins(): try: - import pkg_resources + # Preferred after Python 3.10 + if sys.version_info >= (3, 10): + from importlib.metadata import entry_points + # Introduced in Python 3.8 + elif sys.version_info >= (3, 8) and sys.version_info <= (3, 9): + from importlib_metadata import entry_points + # Removed in Python 3.12 + else: + from pkg_resources import iter_entry_points as entry_points except ImportError: return - group = 'newrelic.admin' + group = "newrelic.admin" - for entrypoint in 
pkg_resources.iter_entry_points(group=group): + for entrypoint in entry_points(group=group): __import__(entrypoint.module_name) @@ -138,12 +146,12 @@ def main(): if len(sys.argv) > 1: command = sys.argv[1] else: - command = 'help' + command = "help" callback = _commands[command] except Exception: - print("Unknown command '%s'." % command, end='') + print("Unknown command '%s'." % command, end="") print("Type 'newrelic-admin help' for usage.") sys.exit(1) @@ -156,5 +164,5 @@ def main(): load_internal_plugins() load_external_plugins() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/newrelic/api/cat_header_mixin.py b/newrelic/api/cat_header_mixin.py index fe5c0a71f..b8251fdca 100644 --- a/newrelic/api/cat_header_mixin.py +++ b/newrelic/api/cat_header_mixin.py @@ -22,6 +22,7 @@ class CatHeaderMixin(object): cat_transaction_key = 'X-NewRelic-Transaction' cat_appdata_key = 'X-NewRelic-App-Data' cat_synthetics_key = 'X-NewRelic-Synthetics' + cat_synthetics_info_key = 'X-NewRelic-Synthetics-Info' cat_metadata_key = 'x-newrelic-trace' cat_distributed_trace_key = 'newrelic' settings = None @@ -105,8 +106,9 @@ def generate_request_headers(cls, transaction): (cls.cat_transaction_key, encoded_transaction)) if transaction.synthetics_header: - nr_headers.append( - (cls.cat_synthetics_key, transaction.synthetics_header)) + nr_headers.append((cls.cat_synthetics_key, transaction.synthetics_header)) + if transaction.synthetics_info_header: + nr_headers.append((cls.cat_synthetics_info_key, transaction.synthetics_info_header)) return nr_headers diff --git a/newrelic/api/message_trace.py b/newrelic/api/message_trace.py index f564c41cb..e0fa5956d 100644 --- a/newrelic/api/message_trace.py +++ b/newrelic/api/message_trace.py @@ -27,6 +27,7 @@ class MessageTrace(CatHeaderMixin, TimeTrace): cat_transaction_key = "NewRelicTransaction" cat_appdata_key = "NewRelicAppData" cat_synthetics_key = "NewRelicSynthetics" + cat_synthetics_info_key = "NewRelicSyntheticsInfo" 
def __init__(self, library, operation, destination_type, destination_name, params=None, terminal=True, **kwargs): parent = kwargs.pop("parent", None) diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index 988b56be6..bbe8f3852 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -44,6 +44,7 @@ json_decode, json_encode, obfuscate, + snake_case, ) from newrelic.core.attribute import ( MAX_ATTRIBUTE_LENGTH, @@ -303,10 +304,17 @@ def __init__(self, application, enabled=None, source=None): self._alternate_path_hashes = {} self.is_part_of_cat = False + # Synthetics Header self.synthetics_resource_id = None self.synthetics_job_id = None self.synthetics_monitor_id = None self.synthetics_header = None + + # Synthetics Info Header + self.synthetics_type = None + self.synthetics_initiator = None + self.synthetics_attributes = None + self.synthetics_info_header = None self._custom_metrics = CustomMetrics() self._dimensional_metrics = DimensionalMetrics() @@ -603,6 +611,10 @@ def __exit__(self, exc, value, tb): synthetics_job_id=self.synthetics_job_id, synthetics_monitor_id=self.synthetics_monitor_id, synthetics_header=self.synthetics_header, + synthetics_type=self.synthetics_type, + synthetics_initiator=self.synthetics_initiator, + synthetics_attributes=self.synthetics_attributes, + synthetics_info_header=self.synthetics_info_header, is_part_of_cat=self.is_part_of_cat, trip_id=self.trip_id, path_hash=self.path_hash, @@ -840,6 +852,16 @@ def trace_intrinsics(self): i_attrs["synthetics_job_id"] = self.synthetics_job_id if self.synthetics_monitor_id: i_attrs["synthetics_monitor_id"] = self.synthetics_monitor_id + if self.synthetics_type: + i_attrs["synthetics_type"] = self.synthetics_type + if self.synthetics_initiator: + i_attrs["synthetics_initiator"] = self.synthetics_initiator + if self.synthetics_attributes: + # Add all synthetics attributes + for k, v in self.synthetics_attributes.items(): + if k: + i_attrs["synthetics_%s" % 
snake_case(k)] = v + if self.total_time: i_attrs["totalTime"] = self.total_time if self._loop_time: diff --git a/newrelic/api/web_transaction.py b/newrelic/api/web_transaction.py index 9749e2619..47155425e 100644 --- a/newrelic/api/web_transaction.py +++ b/newrelic/api/web_transaction.py @@ -125,6 +125,27 @@ def _parse_synthetics_header(header): return synthetics +def _parse_synthetics_info_header(header): + # Return a dictionary of values from SyntheticsInfo header + # Returns empty dict, if version is not supported. + + synthetics_info = {} + version = None + + try: + version = int(header.get("version")) + + if version == 1: + synthetics_info['version'] = version + synthetics_info['type'] = header.get("type") + synthetics_info['initiator'] = header.get("initiator") + synthetics_info['attributes'] = header.get("attributes") + except Exception: + return + + return synthetics_info + + def _remove_query_string(url): url = ensure_str(url) out = urlparse.urlsplit(url) @@ -231,6 +252,7 @@ def _process_synthetics_header(self): settings.trusted_account_ids and \ settings.encoding_key: + # Synthetics Header encoded_header = self._request_headers.get('x-newrelic-synthetics') encoded_header = encoded_header and ensure_str(encoded_header) if not encoded_header: @@ -241,11 +263,20 @@ def _process_synthetics_header(self): settings.encoding_key) synthetics = _parse_synthetics_header(decoded_header) + # Synthetics Info Header + encoded_info_header = self._request_headers.get('x-newrelic-synthetics-info') + encoded_info_header = encoded_info_header and ensure_str(encoded_info_header) + + decoded_info_header = decode_newrelic_header( + encoded_info_header, + settings.encoding_key) + synthetics_info = _parse_synthetics_info_header(decoded_info_header) + if synthetics and \ synthetics['account_id'] in \ settings.trusted_account_ids: - # Save obfuscated header, because we will pass it along + # Save obfuscated headers, because we will pass them along # unchanged in all external 
requests. self.synthetics_header = encoded_header @@ -253,6 +284,12 @@ def _process_synthetics_header(self): self.synthetics_job_id = synthetics['job_id'] self.synthetics_monitor_id = synthetics['monitor_id'] + if synthetics_info: + self.synthetics_info_header = encoded_info_header + self.synthetics_type = synthetics_info['type'] + self.synthetics_initiator = synthetics_info['initiator'] + self.synthetics_attributes = synthetics_info['attributes'] + def _process_context_headers(self): # Process the New Relic cross process ID header and extract # the relevant details. diff --git a/newrelic/common/encoding_utils.py b/newrelic/common/encoding_utils.py index ef8624240..bee53df1a 100644 --- a/newrelic/common/encoding_utils.py +++ b/newrelic/common/encoding_utils.py @@ -571,3 +571,46 @@ def decode(cls, payload, tk): data['pr'] = None return data + + +def capitalize(string): + """Capitalize the first letter of a string.""" + if not string: + return string + elif len(string) == 1: + return string.capitalize() + else: + return "".join((string[0].upper(), string[1:])) + + +def camel_case(string, upper=False): + """ + Convert a string of snake case to camel case. + + Setting upper=True will capitalize the first letter. Defaults to False, where no change is made to the first letter. + """ + string = ensure_str(string) + split_string = list(string.split("_")) + + if len(split_string) < 2: + if upper: + return capitalize(string) + else: + return string + else: + if upper: + camel_cased_string = "".join([capitalize(substr) for substr in split_string]) + else: + camel_cased_string = split_string[0] + "".join([capitalize(substr) for substr in split_string[1:]]) + + return camel_cased_string + + +_snake_case_re = re.compile(r"([A-Z]+[a-z]*)") +def snake_case(string): + """Convert a string of camel case to snake case. 
Assumes no repeated runs of capital letters.""" + string = ensure_str(string) + if "_" in string: + return string # Don't touch strings that are already snake cased + + return "_".join([s for s in _snake_case_re.split(string) if s]).lower() diff --git a/newrelic/common/object_wrapper.py b/newrelic/common/object_wrapper.py index 7d9824fe0..c67696610 100644 --- a/newrelic/common/object_wrapper.py +++ b/newrelic/common/object_wrapper.py @@ -19,16 +19,19 @@ """ -import sys import inspect -from newrelic.packages import six - -from newrelic.packages.wrapt import (ObjectProxy as _ObjectProxy, - FunctionWrapper as _FunctionWrapper, - BoundFunctionWrapper as _BoundFunctionWrapper) - -from newrelic.packages.wrapt.wrappers import _FunctionWrapperBase +from newrelic.packages.wrapt import BoundFunctionWrapper as _BoundFunctionWrapper +from newrelic.packages.wrapt import CallableObjectProxy as _CallableObjectProxy +from newrelic.packages.wrapt import FunctionWrapper as _FunctionWrapper +from newrelic.packages.wrapt import ObjectProxy as _ObjectProxy +from newrelic.packages.wrapt import ( # noqa: F401; pylint: disable=W0611 + apply_patch, + resolve_path, + wrap_object, + wrap_object_attribute, +) +from newrelic.packages.wrapt.__wrapt__ import _FunctionWrapperBase # We previously had our own pure Python implementation of the generic # object wrapper but we now defer to using the wrapt module as its C @@ -47,28 +50,36 @@ # ObjectProxy or FunctionWrapper should be used going forward. -class _ObjectWrapperBase(object): +class ObjectProxy(_ObjectProxy): + """ + This class provides method overrides for all object wrappers used by the + agent. These methods allow attributes to be defined with the special prefix + _nr_ to be interpretted as attributes on the wrapper, rather than the + wrapped object. Inheriting from the base class wrapt.ObjectProxy preserves + method resolution order (MRO) through multiple inheritance. + (See https://www.python.org/download/releases/2.3/mro/). 
+ """ def __setattr__(self, name, value): - if name.startswith('_nr_'): - name = name.replace('_nr_', '_self_', 1) + if name.startswith("_nr_"): + name = name.replace("_nr_", "_self_", 1) setattr(self, name, value) else: - _ObjectProxy.__setattr__(self, name, value) + super(ObjectProxy, self).__setattr__(name, value) def __getattr__(self, name): - if name.startswith('_nr_'): - name = name.replace('_nr_', '_self_', 1) + if name.startswith("_nr_"): + name = name.replace("_nr_", "_self_", 1) return getattr(self, name) else: - return _ObjectProxy.__getattr__(self, name) + return super(ObjectProxy, self).__getattr__(name) def __delattr__(self, name): - if name.startswith('_nr_'): - name = name.replace('_nr_', '_self_', 1) + if name.startswith("_nr_"): + name = name.replace("_nr_", "_self_", 1) delattr(self, name) else: - _ObjectProxy.__delattr__(self, name) + super(ObjectProxy, self).__delattr__(name) @property def _nr_next_object(self): @@ -79,8 +90,7 @@ def _nr_last_object(self): try: return self._self_last_object except AttributeError: - self._self_last_object = getattr(self.__wrapped__, - '_nr_last_object', self.__wrapped__) + self._self_last_object = getattr(self.__wrapped__, "_nr_last_object", self.__wrapped__) return self._self_last_object @property @@ -96,166 +106,45 @@ def _nr_parent(self): return self._self_parent -class _NRBoundFunctionWrapper(_ObjectWrapperBase, _BoundFunctionWrapper): +class _NRBoundFunctionWrapper(ObjectProxy, _BoundFunctionWrapper): pass -class FunctionWrapper(_ObjectWrapperBase, _FunctionWrapper): +class FunctionWrapper(ObjectProxy, _FunctionWrapper): __bound_function_wrapper__ = _NRBoundFunctionWrapper -class ObjectProxy(_ObjectProxy): - - def __setattr__(self, name, value): - if name.startswith('_nr_'): - name = name.replace('_nr_', '_self_', 1) - setattr(self, name, value) - else: - _ObjectProxy.__setattr__(self, name, value) - - def __getattr__(self, name): - if name.startswith('_nr_'): - name = name.replace('_nr_', '_self_', 1) - 
return getattr(self, name) - else: - return _ObjectProxy.__getattr__(self, name) - - def __delattr__(self, name): - if name.startswith('_nr_'): - name = name.replace('_nr_', '_self_', 1) - delattr(self, name) - else: - _ObjectProxy.__delattr__(self, name) - - @property - def _nr_next_object(self): - return self.__wrapped__ - - @property - def _nr_last_object(self): - try: - return self._self_last_object - except AttributeError: - self._self_last_object = getattr(self.__wrapped__, - '_nr_last_object', self.__wrapped__) - return self._self_last_object - - -class CallableObjectProxy(ObjectProxy): +class CallableObjectProxy(ObjectProxy, _CallableObjectProxy): + pass - def __call__(self, *args, **kwargs): - return self.__wrapped__(*args, **kwargs) # The ObjectWrapper class needs to be deprecated and removed once all our # own code no longer uses it. It reaches down into what are wrapt internals # at present which shouldn't be doing. -class ObjectWrapper(_ObjectWrapperBase, _FunctionWrapperBase): +class ObjectWrapper(ObjectProxy, _FunctionWrapperBase): __bound_function_wrapper__ = _NRBoundFunctionWrapper def __init__(self, wrapped, instance, wrapper): if isinstance(wrapped, classmethod): - binding = 'classmethod' + binding = "classmethod" elif isinstance(wrapped, staticmethod): - binding = 'staticmethod' + binding = "staticmethod" else: - binding = 'function' - - super(ObjectWrapper, self).__init__(wrapped, instance, wrapper, - binding=binding) - - -# Helper functions for performing monkey patching. 
+ binding = "function" + super(ObjectWrapper, self).__init__(wrapped, instance, wrapper, binding=binding) -def resolve_path(module, name): - if isinstance(module, six.string_types): - __import__(module) - module = sys.modules[module] - - parent = module - - path = name.split('.') - attribute = path[0] - - original = getattr(parent, attribute) - for attribute in path[1:]: - parent = original - - # We can't just always use getattr() because in doing - # that on a class it will cause binding to occur which - # will complicate things later and cause some things not - # to work. For the case of a class we therefore access - # the __dict__ directly. To cope though with the wrong - # class being given to us, or a method being moved into - # a base class, we need to walk the class hierarchy to - # work out exactly which __dict__ the method was defined - # in, as accessing it from __dict__ will fail if it was - # not actually on the class given. Fallback to using - # getattr() if we can't find it. If it truly doesn't - # exist, then that will fail. - - if inspect.isclass(original): - for cls in inspect.getmro(original): - if attribute in vars(cls): - original = vars(cls)[attribute] - break - else: - original = getattr(original, attribute) - - else: - original = getattr(original, attribute) - - return (parent, attribute, original) - - -def apply_patch(parent, attribute, replacement): - setattr(parent, attribute, replacement) - - -def wrap_object(module, name, factory, args=(), kwargs={}): - (parent, attribute, original) = resolve_path(module, name) - wrapper = factory(original, *args, **kwargs) - apply_patch(parent, attribute, wrapper) - return wrapper - -# Function for apply a proxy object to an attribute of a class instance. -# The wrapper works by defining an attribute of the same name on the -# class which is a descriptor and which intercepts access to the -# instance attribute. 
Note that this cannot be used on attributes which -# are themselves defined by a property object. - - -class AttributeWrapper(object): - - def __init__(self, attribute, factory, args, kwargs): - self.attribute = attribute - self.factory = factory - self.args = args - self.kwargs = kwargs - - def __get__(self, instance, owner): - value = instance.__dict__[self.attribute] - return self.factory(value, *self.args, **self.kwargs) - - def __set__(self, instance, value): - instance.__dict__[self.attribute] = value - - def __delete__(self, instance): - del instance.__dict__[self.attribute] - - -def wrap_object_attribute(module, name, factory, args=(), kwargs={}): - path, attribute = name.rsplit('.', 1) - parent = resolve_path(module, path)[2] - wrapper = AttributeWrapper(attribute, factory, args, kwargs) - apply_patch(parent, attribute, wrapper) - return wrapper # Function for creating a decorator for applying to functions, as well as # short cut functions for applying wrapper functions via monkey patching. +# WARNING: These functions are reproduced directly from wrapt, but using +# our FunctionWrapper class which includes the _nr_ attriubte overrides +# that are inherited from our subclass of wrapt.ObjectProxy.These MUST be +# kept in sync with wrapt when upgrading, or drift may introduce bugs. 
+ def function_wrapper(wrapper): def _wrapper(wrapped, instance, args, kwargs): @@ -267,6 +156,7 @@ def _wrapper(wrapped, instance, args, kwargs): else: target_wrapper = wrapper.__get__(instance, type(instance)) return FunctionWrapper(target_wrapped, target_wrapper) + return FunctionWrapper(wrapper, _wrapper) @@ -274,9 +164,10 @@ def wrap_function_wrapper(module, name, wrapper): return wrap_object(module, name, FunctionWrapper, (wrapper,)) -def patch_function_wrapper(module, name): +def patch_function_wrapper(module, name, enabled=None): def _wrapper(wrapper): - return wrap_object(module, name, FunctionWrapper, (wrapper,)) + return wrap_object(module, name, FunctionWrapper, (wrapper, enabled)) + return _wrapper @@ -299,10 +190,14 @@ def _execute(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) finally: setattr(parent, attribute, original) + return FunctionWrapper(target_wrapped, _execute) + return FunctionWrapper(wrapper, _wrapper) + return _decorator + # Generic decorators for performing actions before and after a wrapped # function is called, or modifying the inbound arguments or return value. 
@@ -315,6 +210,7 @@ def _wrapper(wrapped, instance, args, kwargs): else: function(*args, **kwargs) return wrapped(*args, **kwargs) + return _wrapper @@ -335,6 +231,7 @@ def _wrapper(wrapped, instance, args, kwargs): else: function(*args, **kwargs) return result + return _wrapper @@ -382,6 +279,7 @@ def out_function(function): @function_wrapper def _wrapper(wrapped, instance, args, kwargs): return function(wrapped(*args, **kwargs)) + return _wrapper diff --git a/newrelic/common/package_version_utils.py b/newrelic/common/package_version_utils.py index aa736a94e..5081f1bd0 100644 --- a/newrelic/common/package_version_utils.py +++ b/newrelic/common/package_version_utils.py @@ -115,7 +115,7 @@ def _get_package_version(name): for attr in VERSION_ATTRS: try: version = getattr(module, attr, None) - + # In certain cases like importlib_metadata.version, version is a callable # function. if callable(version): @@ -135,6 +135,7 @@ def _get_package_version(name): if hasattr(sys.modules["importlib"].metadata, "packages_distributions"): # pylint: disable=E1101 distributions = sys.modules["importlib"].metadata.packages_distributions() # pylint: disable=E1101 distribution_name = distributions.get(name, name) + distribution_name = distribution_name[0] if isinstance(distribution_name, list) else distribution_name else: distribution_name = name diff --git a/newrelic/common/signature.py b/newrelic/common/signature.py index 314998196..3fe516bdc 100644 --- a/newrelic/common/signature.py +++ b/newrelic/common/signature.py @@ -18,7 +18,7 @@ from inspect import Signature def bind_args(func, args, kwargs): - """Bind arguments and apply defaults to missing arugments for a callable.""" + """Bind arguments and apply defaults to missing arguments for a callable.""" bound_args = Signature.from_callable(func).bind(*args, **kwargs) bound_args.apply_defaults() return bound_args.arguments @@ -27,5 +27,5 @@ def bind_args(func, args, kwargs): from inspect import getcallargs def bind_args(func, args, 
kwargs): - """Bind arguments and apply defaults to missing arugments for a callable.""" + """Bind arguments and apply defaults to missing arguments for a callable.""" return getcallargs(func, *args, **kwargs) diff --git a/newrelic/common/utilization.py b/newrelic/common/utilization.py index f205b4e13..b7ddbdf43 100644 --- a/newrelic/common/utilization.py +++ b/newrelic/common/utilization.py @@ -265,13 +265,18 @@ def get_values(cls, response): class DockerUtilization(CommonUtilization): VENDOR_NAME = 'docker' EXPECTED_KEYS = ('id',) - METADATA_FILE = '/proc/self/cgroup' - DOCKER_RE = re.compile(r'([0-9a-f]{64,})') + + METADATA_FILE_CGROUPS_V1 = '/proc/self/cgroup' + METADATA_RE_CGROUPS_V1 = re.compile(r'[0-9a-f]{64,}') + + METADATA_FILE_CGROUPS_V2 = '/proc/self/mountinfo' + METADATA_RE_CGROUPS_V2 = re.compile(r'^.*/docker/containers/([0-9a-f]{64,})/.*$') @classmethod def fetch(cls): + # Try to read from cgroups try: - with open(cls.METADATA_FILE, 'rb') as f: + with open(cls.METADATA_FILE_CGROUPS_V1, 'rb') as f: for line in f: stripped = line.decode('utf-8').strip() cgroup = stripped.split(':') @@ -279,7 +284,23 @@ def fetch(cls): continue subsystems = cgroup[1].split(',') if 'cpu' in subsystems: - return cgroup[2] + contents = cgroup[2].split('/')[-1] + match = cls.METADATA_RE_CGROUPS_V1.search(contents) + if match: + return match.group(0) + except: + # There are all sorts of exceptions that can occur here + # (i.e. permissions, non-existent file, etc) + pass + + # Fallback to reading from mountinfo + try: + with open(cls.METADATA_FILE_CGROUPS_V2, 'rb') as f: + for line in f: + stripped = line.decode('utf-8').strip() + match = cls.METADATA_RE_CGROUPS_V2.match(stripped) + if match: + return match.group(1) except: # There are all sorts of exceptions that can occur here # (i.e. 
permissions, non-existent file, etc) @@ -290,11 +311,7 @@ def get_values(cls, contents): if contents is None: return - value = contents.split('/')[-1] - match = cls.DOCKER_RE.search(value) - if match: - value = match.group(0) - return {'id': value} + return {'id': contents} @classmethod def valid_chars(cls, data): @@ -315,11 +332,7 @@ def valid_length(cls, data): return False # Must be exactly 64 characters - valid = len(data) == 64 - if valid: - return True - - return False + return bool(len(data) == 64) class KubernetesUtilization(CommonUtilization): diff --git a/newrelic/config.py b/newrelic/config.py index 608d59fc3..7779ebc9d 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -564,6 +564,7 @@ def _process_configuration(section): _process_setting(section, "machine_learning.enabled", "getboolean", None) _process_setting(section, "machine_learning.inference_events_value.enabled", "getboolean", None) + _process_setting(section, "package_reporting.enabled", "getboolean", None) # Loading of configuration from specified file and for specified @@ -2477,7 +2478,11 @@ def _process_module_builtin_defaults(): "newrelic.hooks.logger_structlog", "instrument_structlog__base", ) - + _process_module_definition( + "structlog._frames", + "newrelic.hooks.logger_structlog", + "instrument_structlog__frames", + ) _process_module_definition( "paste.httpserver", "newrelic.hooks.adapter_paste", @@ -3952,13 +3957,21 @@ def _process_module_builtin_defaults(): def _process_module_entry_points(): try: - import pkg_resources + # Preferred after Python 3.10 + if sys.version_info >= (3, 10): + from importlib.metadata import entry_points + # Introduced in Python 3.8 + elif sys.version_info >= (3, 8) and sys.version_info <= (3, 9): + from importlib_metadata import entry_points + # Removed in Python 3.12 + else: + from pkg_resources import iter_entry_points as entry_points except ImportError: return group = "newrelic.hooks" - for entrypoint in 
pkg_resources.iter_entry_points(group=group): + for entrypoint in entry_points(group=group): target = entrypoint.name if target in _module_import_hook_registry: @@ -4016,13 +4029,21 @@ def _setup_instrumentation(): def _setup_extensions(): try: - import pkg_resources + # Preferred after Python 3.10 + if sys.version_info >= (3, 10): + from importlib.metadata import entry_points + # Introduced in Python 3.8 + elif sys.version_info >= (3, 8) and sys.version_info <= (3, 9): + from importlib_metadata import entry_points + # Removed in Python 3.12 + else: + from pkg_resources import iter_entry_points as entry_points except ImportError: return group = "newrelic.extension" - for entrypoint in pkg_resources.iter_entry_points(group=group): + for entrypoint in entry_points(group=group): __import__(entrypoint.module_name) module = sys.modules[entrypoint.module_name] module.initialize() diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 8b366f7d7..5a8306160 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -143,6 +143,10 @@ class MachineLearningInferenceEventsValueSettings(Settings): pass +class PackageReportingSettings(Settings): + pass + + class CodeLevelMetricsSettings(Settings): pass @@ -422,6 +426,7 @@ class EventHarvestConfigHarvestLimitSettings(Settings): _settings.application_logging.metrics = ApplicationLoggingMetricsSettings() _settings.machine_learning = MachineLearningSettings() _settings.machine_learning.inference_events_value = MachineLearningInferenceEventsValueSettings() +_settings.package_reporting = PackageReportingSettings() _settings.attributes = AttributesSettings() _settings.browser_monitoring = BrowserMonitorSettings() _settings.browser_monitoring.attributes = BrowserMonitorAttributesSettings() @@ -927,6 +932,7 @@ def default_otlp_host(host): _settings.machine_learning.inference_events_value.enabled = _environ_as_bool( "NEW_RELIC_MACHINE_LEARNING_INFERENCE_EVENT_VALUE_ENABLED", default=False ) 
+_settings.package_reporting.enabled = _environ_as_bool("NEW_RELIC_PACKAGE_REPORTING_ENABLED", default=True) _settings.security.agent.enabled = _environ_as_bool("NEW_RELIC_SECURITY_AGENT_ENABLED", False) _settings.security.enabled = _environ_as_bool("NEW_RELIC_SECURITY_ENABLED", False) diff --git a/newrelic/core/environment.py b/newrelic/core/environment.py index 9bca085a3..6d24eced5 100644 --- a/newrelic/core/environment.py +++ b/newrelic/core/environment.py @@ -29,6 +29,7 @@ physical_processor_count, total_physical_memory, ) +from newrelic.core.config import global_settings from newrelic.packages.isort import stdlibs as isort_stdlibs try: @@ -202,44 +203,46 @@ def environment_settings(): plugins = [] - # Using any iterable to create a snapshot of sys.modules can occassionally - fail in a rare case when modules are imported in parallel by different - threads. - # - # TL;DR: Do NOT use an iterable on the original sys.modules to generate the - # list - for name, module in sys.modules.copy().items(): - # Exclude lib.sub_paths as independent modules except for newrelic.hooks. - nr_hook = name.startswith("newrelic.hooks.") - if "." in name and not nr_hook or name.startswith("_"): - continue - - # If the module isn't actually loaded (such as failed relative imports - in Python 2.7), the module will be None and should not be reported. - try: - if not module: + settings = global_settings() + if settings and settings.package_reporting.enabled: + # Using any iterable to create a snapshot of sys.modules can occasionally + fail in a rare case when modules are imported in parallel by different + threads. + # + # TL;DR: Do NOT use an iterable on the original sys.modules to generate the + # list + for name, module in sys.modules.copy().items(): + # Exclude lib.sub_paths as independent modules except for newrelic.hooks. + nr_hook = name.startswith("newrelic.hooks.") + if "."
in name and not nr_hook or name.startswith("_"): continue - except Exception: - # if the application uses generalimport to manage optional depedencies, - it's possible that generalimport.MissingOptionalDependency is raised. - In this case, we should not report the module as it is not actually loaded and - is not a runtime dependency of the application. - # - continue - - # Exclude standard library/built-in modules. - if name in stdlib_builtin_module_names: - continue - - try: - version = get_package_version(name) - except Exception: - version = None - - # If it has no version it's likely not a real package so don't report it unless - # it's a new relic hook. - if version or nr_hook: - plugins.append("%s (%s)" % (name, version)) + + # If the module isn't actually loaded (such as failed relative imports + in Python 2.7), the module will be None and should not be reported. + try: + if not module: + continue + except Exception: + # if the application uses generalimport to manage optional dependencies, + # it's possible that generalimport.MissingOptionalDependency is raised. + # In this case, we should not report the module as it is not actually loaded and + # is not a runtime dependency of the application. + # + continue + + # Exclude standard library/built-in modules. + if name in stdlib_builtin_module_names: + continue + + try: + version = get_package_version(name) + except Exception: + version = None + + # If it has no version it's likely not a real package so don't report it unless + # it's a new relic hook.
+ if version or nr_hook: + plugins.append("%s (%s)" % (name, version)) env.append(("Plugin List", plugins)) diff --git a/newrelic/core/transaction_node.py b/newrelic/core/transaction_node.py index d63d7f9b6..74216f7df 100644 --- a/newrelic/core/transaction_node.py +++ b/newrelic/core/transaction_node.py @@ -22,6 +22,7 @@ import newrelic.core.error_collector import newrelic.core.trace_node +from newrelic.common.encoding_utils import camel_case from newrelic.common.streaming_utils import SpanProtoAttrs from newrelic.core.attribute import create_agent_attributes, create_user_attributes from newrelic.core.attribute_filter import ( @@ -76,6 +77,10 @@ "synthetics_job_id", "synthetics_monitor_id", "synthetics_header", + "synthetics_type", + "synthetics_initiator", + "synthetics_attributes", + "synthetics_info_header", "is_part_of_cat", "trip_id", "path_hash", @@ -586,6 +591,15 @@ def _event_intrinsics(self, stats_table): intrinsics["nr.syntheticsJobId"] = self.synthetics_job_id intrinsics["nr.syntheticsMonitorId"] = self.synthetics_monitor_id + if self.synthetics_type: + intrinsics["nr.syntheticsType"] = self.synthetics_type + intrinsics["nr.syntheticsInitiator"] = self.synthetics_initiator + if self.synthetics_attributes: + # Add all synthetics attributes + for k, v in self.synthetics_attributes.items(): + if k: + intrinsics["nr.synthetics%s" % camel_case(k, upper=True)] = v + def _add_call_time(source, target): # include time for keys previously added to stats table via # stats_engine.record_transaction diff --git a/newrelic/hooks/framework_pyramid.py b/newrelic/hooks/framework_pyramid.py index 996ebb372..ba5e5e07a 100644 --- a/newrelic/hooks/framework_pyramid.py +++ b/newrelic/hooks/framework_pyramid.py @@ -53,17 +53,11 @@ wrap_function_wrapper, wrap_out_function, ) +from newrelic.common.package_version_utils import get_package_version def instrument_pyramid_router(module): - pyramid_version = None - - try: - import pkg_resources - - pyramid_version = 
pkg_resources.get_distribution("pyramid").version - except Exception: - pass + pyramid_version = get_package_version("pyramid") wrap_wsgi_application(module, "Router.__call__", framework=("Pyramid", pyramid_version)) diff --git a/newrelic/hooks/logger_structlog.py b/newrelic/hooks/logger_structlog.py index e652a795c..06d13aa4a 100644 --- a/newrelic/hooks/logger_structlog.py +++ b/newrelic/hooks/logger_structlog.py @@ -17,6 +17,7 @@ from newrelic.core.config import global_settings from newrelic.api.application import application_instance from newrelic.hooks.logger_logging import add_nr_linking_metadata +from newrelic.common.signature import bind_args def normalize_level_name(method_name): @@ -81,6 +82,25 @@ def wrap__process_event(wrapped, instance, args, kwargs): return wrapped(*args, **kwargs) +def wrap__find_first_app_frame_and_name(wrapped, instance, args, kwargs): + try: + bound_args = bind_args(wrapped, args, kwargs) + if bound_args["additional_ignores"]: + bound_args["additional_ignores"] = list(bound_args["additional_ignores"]) + bound_args["additional_ignores"].append("newrelic") + else: + bound_args["additional_ignores"] = ["newrelic"] + except Exception: + return wrapped(*args, **kwargs) + + return wrapped(**bound_args) + + def instrument_structlog__base(module): if hasattr(module, "BoundLoggerBase") and hasattr(module.BoundLoggerBase, "_process_event"): wrap_function_wrapper(module, "BoundLoggerBase._process_event", wrap__process_event) + + +def instrument_structlog__frames(module): + if hasattr(module, "_find_first_app_frame_and_name"): + wrap_function_wrapper(module, "_find_first_app_frame_and_name", wrap__find_first_app_frame_and_name) diff --git a/newrelic/hooks/messagebroker_pika.py b/newrelic/hooks/messagebroker_pika.py index d6120c10d..5396e3807 100644 --- a/newrelic/hooks/messagebroker_pika.py +++ b/newrelic/hooks/messagebroker_pika.py @@ -278,7 +278,7 @@ def _generator(generator): if any(exc): to_throw = exc exc = (None, None, None) - yielded = 
generator.throw(*to_throw) + yielded = generator.throw(to_throw[1]) else: yielded = generator.send(value) diff --git a/newrelic/packages/wrapt/__init__.py b/newrelic/packages/wrapt/__init__.py index ee6539b77..ed31a9431 100644 --- a/newrelic/packages/wrapt/__init__.py +++ b/newrelic/packages/wrapt/__init__.py @@ -1,12 +1,15 @@ -__version_info__ = ('1', '14', '1') +__version_info__ = ('1', '16', '0') __version__ = '.'.join(__version_info__) -from .wrappers import (ObjectProxy, CallableObjectProxy, FunctionWrapper, - BoundFunctionWrapper, WeakFunctionProxy, PartialCallableObjectProxy, - resolve_path, apply_patch, wrap_object, wrap_object_attribute, +from .__wrapt__ import (ObjectProxy, CallableObjectProxy, FunctionWrapper, + BoundFunctionWrapper, PartialCallableObjectProxy) + +from .patches import (resolve_path, apply_patch, wrap_object, wrap_object_attribute, function_wrapper, wrap_function_wrapper, patch_function_wrapper, transient_function_wrapper) +from .weakrefs import WeakFunctionProxy + from .decorators import (adapter_factory, AdapterFactory, decorator, synchronized) diff --git a/newrelic/packages/wrapt/__wrapt__.py b/newrelic/packages/wrapt/__wrapt__.py new file mode 100644 index 000000000..9933b2c97 --- /dev/null +++ b/newrelic/packages/wrapt/__wrapt__.py @@ -0,0 +1,14 @@ +import os + +from .wrappers import (ObjectProxy, CallableObjectProxy, + PartialCallableObjectProxy, FunctionWrapper, + BoundFunctionWrapper, _FunctionWrapperBase) + +try: + if not os.environ.get('WRAPT_DISABLE_EXTENSIONS'): + from ._wrappers import (ObjectProxy, CallableObjectProxy, + PartialCallableObjectProxy, FunctionWrapper, + BoundFunctionWrapper, _FunctionWrapperBase) + +except ImportError: + pass diff --git a/newrelic/packages/wrapt/_wrappers.c b/newrelic/packages/wrapt/_wrappers.c index 67c5d5e1a..e0e1b5bc6 100644 --- a/newrelic/packages/wrapt/_wrappers.c +++ b/newrelic/packages/wrapt/_wrappers.c @@ -1139,6 +1139,30 @@ static int WraptObjectProxy_setitem(WraptObjectProxyObject 
*self, /* ------------------------------------------------------------------------- */ +static PyObject *WraptObjectProxy_self_setattr( + WraptObjectProxyObject *self, PyObject *args) +{ + PyObject *name = NULL; + PyObject *value = NULL; + +#if PY_MAJOR_VERSION >= 3 + if (!PyArg_ParseTuple(args, "UO:__self_setattr__", &name, &value)) + return NULL; +#else + if (!PyArg_ParseTuple(args, "SO:__self_setattr__", &name, &value)) + return NULL; +#endif + + if (PyObject_GenericSetAttr((PyObject *)self, name, value) != 0) { + return NULL; + } + + Py_INCREF(Py_None); + return Py_None; +} + +/* ------------------------------------------------------------------------- */ + static PyObject *WraptObjectProxy_dir( WraptObjectProxyObject *self, PyObject *args) { @@ -1464,6 +1488,19 @@ static PyObject *WraptObjectProxy_get_class( /* ------------------------------------------------------------------------- */ +static int WraptObjectProxy_set_class(WraptObjectProxyObject *self, + PyObject *value) +{ + if (!self->wrapped) { + PyErr_SetString(PyExc_ValueError, "wrapper has not been initialized"); + return -1; + } + + return PyObject_SetAttrString(self->wrapped, "__class__", value); +} + +/* ------------------------------------------------------------------------- */ + static PyObject *WraptObjectProxy_get_annotations( WraptObjectProxyObject *self) { @@ -1535,6 +1572,9 @@ static PyObject *WraptObjectProxy_getattro( if (object) return object; + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + return NULL; + PyErr_Clear(); if (!getattr_str) { @@ -1738,6 +1778,8 @@ static PyMappingMethods WraptObjectProxy_as_mapping = { }; static PyMethodDef WraptObjectProxy_methods[] = { + { "__self_setattr__", (PyCFunction)WraptObjectProxy_self_setattr, + METH_VARARGS , 0 }, { "__dir__", (PyCFunction)WraptObjectProxy_dir, METH_NOARGS, 0 }, { "__enter__", (PyCFunction)WraptObjectProxy_enter, METH_VARARGS | METH_KEYWORDS, 0 }, @@ -1776,7 +1818,7 @@ static PyGetSetDef WraptObjectProxy_getset[] = { { 
"__doc__", (getter)WraptObjectProxy_get_doc, (setter)WraptObjectProxy_set_doc, 0 }, { "__class__", (getter)WraptObjectProxy_get_class, - NULL, 0 }, + (setter)WraptObjectProxy_set_class, 0 }, { "__annotations__", (getter)WraptObjectProxy_get_annotations, (setter)WraptObjectProxy_set_annotations, 0 }, { "__wrapped__", (getter)WraptObjectProxy_get_wrapped, @@ -2547,7 +2589,6 @@ static PyObject *WraptFunctionWrapperBase_set_name( static PyObject *WraptFunctionWrapperBase_instancecheck( WraptFunctionWrapperObject *self, PyObject *instance) { - PyObject *object = NULL; PyObject *result = NULL; int check = 0; diff --git a/newrelic/packages/wrapt/decorators.py b/newrelic/packages/wrapt/decorators.py index c3f254729..c80a4bb72 100644 --- a/newrelic/packages/wrapt/decorators.py +++ b/newrelic/packages/wrapt/decorators.py @@ -41,7 +41,7 @@ def exec_(_code_, _globs_=None, _locs_=None): except ImportError: pass -from .wrappers import (FunctionWrapper, BoundFunctionWrapper, ObjectProxy, +from .__wrapt__ import (FunctionWrapper, BoundFunctionWrapper, ObjectProxy, CallableObjectProxy) # Adapter wrapper for the wrapped function which will overlay certain diff --git a/newrelic/packages/wrapt/importer.py b/newrelic/packages/wrapt/importer.py index 5c4d4cc66..23fcbd2f6 100644 --- a/newrelic/packages/wrapt/importer.py +++ b/newrelic/packages/wrapt/importer.py @@ -15,7 +15,7 @@ string_types = str, from importlib.util import find_spec -from .decorators import synchronized +from .__wrapt__ import ObjectProxy # The dictionary registering any post import hooks to be triggered once # the target module has been imported. Once a module has been imported @@ -45,7 +45,6 @@ def import_hook(module): return callback(module) return import_hook -@synchronized(_post_import_hooks_lock) def register_post_import_hook(hook, name): # Create a deferred import hook if hook is a string name rather than # a callable function. 
@@ -53,51 +52,32 @@ def register_post_import_hook(hook, name): if isinstance(hook, string_types): hook = _create_import_hook_from_string(hook) - # Automatically install the import hook finder if it has not already - # been installed. + with _post_import_hooks_lock: + # Automatically install the import hook finder if it has not already + # been installed. - global _post_import_hooks_init + global _post_import_hooks_init - if not _post_import_hooks_init: - _post_import_hooks_init = True - sys.meta_path.insert(0, ImportHookFinder()) + if not _post_import_hooks_init: + _post_import_hooks_init = True + sys.meta_path.insert(0, ImportHookFinder()) - # Determine if any prior registration of a post import hook for - # the target modules has occurred and act appropriately. - - hooks = _post_import_hooks.get(name, None) - - if hooks is None: - # No prior registration of post import hooks for the target - # module. We need to check whether the module has already been - # imported. If it has we fire the hook immediately and add an - # empty list to the registry to indicate that the module has - # already been imported and hooks have fired. Otherwise add - # the post import hook to the registry. + # Check if the module is already imported. If not, register the hook + # to be called after import. module = sys.modules.get(name, None) - if module is not None: - _post_import_hooks[name] = [] - hook(module) - - else: - _post_import_hooks[name] = [hook] + if module is None: + _post_import_hooks.setdefault(name, []).append(hook) - elif hooks == []: - # A prior registration of port import hooks for the target - # module was done and the hooks already fired. Fire the hook - # immediately. + # If the module is already imported, we fire the hook right away. Note that + # the hook is called outside of the lock to avoid deadlocks if code run as a + # consequence of calling the module import hook in turn triggers a separate + # thread which tries to register an import hook. 
- module = sys.modules[name] + if module is not None: hook(module) - else: - # A prior registration of port import hooks for the target - # module was done but the module has not yet been imported. - - _post_import_hooks[name].append(hook) - # Register post import hooks defined as package entry points. def _create_import_hook_from_entrypoint(entrypoint): @@ -124,16 +104,18 @@ def discover_post_import_hooks(group): # exception is raised in any of the post import hooks, that will cause # the import of the target module to fail. -@synchronized(_post_import_hooks_lock) def notify_module_loaded(module): name = getattr(module, '__name__', None) - hooks = _post_import_hooks.get(name, None) - if hooks: - _post_import_hooks[name] = [] + with _post_import_hooks_lock: + hooks = _post_import_hooks.pop(name, ()) - for hook in hooks: - hook(module) + # Note that the hook is called outside of the lock to avoid deadlocks if + # code run as a consequence of calling the module import hook in turn + # triggers a separate thread which tries to register an import hook. + + for hook in hooks: + hook(module) # A custom module import finder. 
This intercepts attempts to import # modules and watches out for attempts to import target modules of @@ -148,20 +130,45 @@ def load_module(self, fullname): return module -class _ImportHookChainedLoader: +class _ImportHookChainedLoader(ObjectProxy): def __init__(self, loader): - self.loader = loader + super(_ImportHookChainedLoader, self).__init__(loader) if hasattr(loader, "load_module"): - self.load_module = self._load_module + self.__self_setattr__('load_module', self._self_load_module) if hasattr(loader, "create_module"): - self.create_module = self._create_module + self.__self_setattr__('create_module', self._self_create_module) if hasattr(loader, "exec_module"): - self.exec_module = self._exec_module - - def _load_module(self, fullname): - module = self.loader.load_module(fullname) + self.__self_setattr__('exec_module', self._self_exec_module) + + def _self_set_loader(self, module): + # Set module's loader to self.__wrapped__ unless it's already set to + # something else. Import machinery will set it to spec.loader if it is + # None, so handle None as well. The module may not support attribute + # assignment, in which case we simply skip it. Note that we also deal + # with __loader__ not existing at all. This is to future proof things + # due to proposal to remove the attribue as described in the GitHub + # issue at https://github.com/python/cpython/issues/77458. Also prior + # to Python 3.3, the __loader__ attribute was only set if a custom + # module loader was used. It isn't clear whether the attribute still + # existed in that case or was set to None. 
+ + class UNDEFINED: pass + + if getattr(module, "__loader__", UNDEFINED) in (None, self): + try: + module.__loader__ = self.__wrapped__ + except AttributeError: + pass + + if (getattr(module, "__spec__", None) is not None + and getattr(module.__spec__, "loader", None) is self): + module.__spec__.loader = self.__wrapped__ + + def _self_load_module(self, fullname): + module = self.__wrapped__.load_module(fullname) + self._self_set_loader(module) notify_module_loaded(module) return module @@ -169,11 +176,12 @@ def _load_module(self, fullname): # Python 3.4 introduced create_module() and exec_module() instead of # load_module() alone. Splitting the two steps. - def _create_module(self, spec): - return self.loader.create_module(spec) + def _self_create_module(self, spec): + return self.__wrapped__.create_module(spec) - def _exec_module(self, module): - self.loader.exec_module(module) + def _self_exec_module(self, module): + self._self_set_loader(module) + self.__wrapped__.exec_module(module) notify_module_loaded(module) class ImportHookFinder: @@ -181,14 +189,14 @@ class ImportHookFinder: def __init__(self): self.in_progress = {} - @synchronized(_post_import_hooks_lock) def find_module(self, fullname, path=None): # If the module being imported is not one we have registered # post import hooks for, we can return immediately. We will # take no further part in the importing of this module. - if not fullname in _post_import_hooks: - return None + with _post_import_hooks_lock: + if fullname not in _post_import_hooks: + return None # When we are interested in a specific module, we will call back # into the import system a second time to defer to the import @@ -244,8 +252,9 @@ def find_spec(self, fullname, path=None, target=None): # post import hooks for, we can return immediately. We will # take no further part in the importing of this module. 
- if not fullname in _post_import_hooks: - return None + with _post_import_hooks_lock: + if fullname not in _post_import_hooks: + return None # When we are interested in a specific module, we will call back # into the import system a second time to defer to the import diff --git a/newrelic/packages/wrapt/patches.py b/newrelic/packages/wrapt/patches.py new file mode 100644 index 000000000..e22adf7ca --- /dev/null +++ b/newrelic/packages/wrapt/patches.py @@ -0,0 +1,141 @@ +import inspect +import sys + +PY2 = sys.version_info[0] == 2 + +if PY2: + string_types = basestring, +else: + string_types = str, + +from .__wrapt__ import FunctionWrapper + +# Helper functions for applying wrappers to existing functions. + +def resolve_path(module, name): + if isinstance(module, string_types): + __import__(module) + module = sys.modules[module] + + parent = module + + path = name.split('.') + attribute = path[0] + + # We can't just always use getattr() because in doing + # that on a class it will cause binding to occur which + # will complicate things later and cause some things not + # to work. For the case of a class we therefore access + # the __dict__ directly. To cope though with the wrong + # class being given to us, or a method being moved into + # a base class, we need to walk the class hierarchy to + # work out exactly which __dict__ the method was defined + # in, as accessing it from __dict__ will fail if it was + # not actually on the class given. Fallback to using + # getattr() if we can't find it. If it truly doesn't + # exist, then that will fail. 
+ + def lookup_attribute(parent, attribute): + if inspect.isclass(parent): + for cls in inspect.getmro(parent): + if attribute in vars(cls): + return vars(cls)[attribute] + else: + return getattr(parent, attribute) + else: + return getattr(parent, attribute) + + original = lookup_attribute(parent, attribute) + + for attribute in path[1:]: + parent = original + original = lookup_attribute(parent, attribute) + + return (parent, attribute, original) + +def apply_patch(parent, attribute, replacement): + setattr(parent, attribute, replacement) + +def wrap_object(module, name, factory, args=(), kwargs={}): + (parent, attribute, original) = resolve_path(module, name) + wrapper = factory(original, *args, **kwargs) + apply_patch(parent, attribute, wrapper) + return wrapper + +# Function for applying a proxy object to an attribute of a class +# instance. The wrapper works by defining an attribute of the same name +# on the class which is a descriptor and which intercepts access to the +# instance attribute. Note that this cannot be used on attributes which +# are themselves defined by a property object. 
+ +class AttributeWrapper(object): + + def __init__(self, attribute, factory, args, kwargs): + self.attribute = attribute + self.factory = factory + self.args = args + self.kwargs = kwargs + + def __get__(self, instance, owner): + value = instance.__dict__[self.attribute] + return self.factory(value, *self.args, **self.kwargs) + + def __set__(self, instance, value): + instance.__dict__[self.attribute] = value + + def __delete__(self, instance): + del instance.__dict__[self.attribute] + +def wrap_object_attribute(module, name, factory, args=(), kwargs={}): + path, attribute = name.rsplit('.', 1) + parent = resolve_path(module, path)[2] + wrapper = AttributeWrapper(attribute, factory, args, kwargs) + apply_patch(parent, attribute, wrapper) + return wrapper + +# Functions for creating a simple decorator using a FunctionWrapper, +# plus short cut functions for applying wrappers to functions. These are +# for use when doing monkey patching. For a more featured way of +# creating decorators see the decorator decorator instead. 
+ +def function_wrapper(wrapper): + def _wrapper(wrapped, instance, args, kwargs): + target_wrapped = args[0] + if instance is None: + target_wrapper = wrapper + elif inspect.isclass(instance): + target_wrapper = wrapper.__get__(None, instance) + else: + target_wrapper = wrapper.__get__(instance, type(instance)) + return FunctionWrapper(target_wrapped, target_wrapper) + return FunctionWrapper(wrapper, _wrapper) + +def wrap_function_wrapper(module, name, wrapper): + return wrap_object(module, name, FunctionWrapper, (wrapper,)) + +def patch_function_wrapper(module, name, enabled=None): + def _wrapper(wrapper): + return wrap_object(module, name, FunctionWrapper, (wrapper, enabled)) + return _wrapper + +def transient_function_wrapper(module, name): + def _decorator(wrapper): + def _wrapper(wrapped, instance, args, kwargs): + target_wrapped = args[0] + if instance is None: + target_wrapper = wrapper + elif inspect.isclass(instance): + target_wrapper = wrapper.__get__(None, instance) + else: + target_wrapper = wrapper.__get__(instance, type(instance)) + def _execute(wrapped, instance, args, kwargs): + (parent, attribute, original) = resolve_path(module, name) + replacement = FunctionWrapper(original, target_wrapper) + setattr(parent, attribute, replacement) + try: + return wrapped(*args, **kwargs) + finally: + setattr(parent, attribute, original) + return FunctionWrapper(target_wrapped, _execute) + return FunctionWrapper(wrapper, _wrapper) + return _decorator diff --git a/newrelic/packages/wrapt/weakrefs.py b/newrelic/packages/wrapt/weakrefs.py new file mode 100644 index 000000000..f931b60d5 --- /dev/null +++ b/newrelic/packages/wrapt/weakrefs.py @@ -0,0 +1,98 @@ +import functools +import weakref + +from .__wrapt__ import ObjectProxy, _FunctionWrapperBase + +# A weak function proxy. This will work on instance methods, class +# methods, static methods and regular functions. 
Special treatment is +# needed for the method types because the bound method is effectively a +# transient object and applying a weak reference to one will immediately +# result in it being destroyed and the weakref callback called. The weak +# reference is therefore applied to the instance the method is bound to +# and the original function. The function is then rebound at the point +# of a call via the weak function proxy. + +def _weak_function_proxy_callback(ref, proxy, callback): + if proxy._self_expired: + return + + proxy._self_expired = True + + # This could raise an exception. We let it propagate back and let + # the weakref.proxy() deal with it, at which point it generally + # prints out a short error message direct to stderr and keeps going. + + if callback is not None: + callback(proxy) + +class WeakFunctionProxy(ObjectProxy): + + __slots__ = ('_self_expired', '_self_instance') + + def __init__(self, wrapped, callback=None): + # We need to determine if the wrapped function is actually a + # bound method. In the case of a bound method, we need to keep a + # reference to the original unbound function and the instance. + # This is necessary because if we hold a reference to the bound + # function, it will be the only reference and given it is a + # temporary object, it will almost immediately expire and + # the weakref callback triggered. So what is done is that we + # hold a reference to the instance and unbound function and + # when called bind the function to the instance once again and + # then call it. Note that we avoid using a nested function for + # the callback here so as not to cause any odd reference cycles. 
+ + _callback = callback and functools.partial( + _weak_function_proxy_callback, proxy=self, + callback=callback) + + self._self_expired = False + + if isinstance(wrapped, _FunctionWrapperBase): + self._self_instance = weakref.ref(wrapped._self_instance, + _callback) + + if wrapped._self_parent is not None: + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped._self_parent, _callback)) + + else: + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped, _callback)) + + return + + try: + self._self_instance = weakref.ref(wrapped.__self__, _callback) + + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped.__func__, _callback)) + + except AttributeError: + self._self_instance = None + + super(WeakFunctionProxy, self).__init__( + weakref.proxy(wrapped, _callback)) + + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + + # We perform a boolean check here on the instance and wrapped + # function as that will trigger the reference error prior to + # calling if the reference had expired. + + instance = self._self_instance and self._self_instance() + function = self.__wrapped__ and self.__wrapped__ + + # If the wrapped function was originally a bound function, for + # which we retained a reference to the instance and the unbound + # function we need to rebind the function and then call it. If + # not just called the wrapped function. 
+ + if instance is None: + return self.__wrapped__(*args, **kwargs) + + return function.__get__(instance, type(instance))(*args, **kwargs) diff --git a/newrelic/packages/wrapt/wrappers.py b/newrelic/packages/wrapt/wrappers.py index 2716cd1da..dfc3440db 100644 --- a/newrelic/packages/wrapt/wrappers.py +++ b/newrelic/packages/wrapt/wrappers.py @@ -1,8 +1,5 @@ -import os import sys -import functools import operator -import weakref import inspect PY2 = sys.version_info[0] == 2 @@ -94,6 +91,9 @@ def __init__(self, wrapped): except AttributeError: pass + def __self_setattr__(self, name, value): + object.__setattr__(self, name, value) + @property def __name__(self): return self.__wrapped__.__name__ @@ -445,12 +445,22 @@ def __reduce_ex__(self, protocol): class CallableObjectProxy(ObjectProxy): - def __call__(self, *args, **kwargs): + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + return self.__wrapped__(*args, **kwargs) class PartialCallableObjectProxy(ObjectProxy): - def __init__(self, *args, **kwargs): + def __init__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + if len(args) < 1: raise TypeError('partial type takes at least one argument') @@ -464,7 +474,12 @@ def __init__(self, *args, **kwargs): self._self_args = args self._self_kwargs = kwargs - def __call__(self, *args, **kwargs): + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + _args = self._self_args + args _kwargs = dict(self._self_kwargs) @@ -544,7 +559,12 @@ def __get__(self, instance, owner): return self - def __call__(self, *args, **kwargs): + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + # If enabled has been specified, then evaluate it at this point # and if the wrapper is not to be executed, then simply return # the 
bound function rather than a bound wrapper for the bound @@ -607,7 +627,12 @@ def __subclasscheck__(self, subclass): class BoundFunctionWrapper(_FunctionWrapperBase): - def __call__(self, *args, **kwargs): + def __call__(*args, **kwargs): + def _unpack_self(self, *args): + return self, args + + self, args = _unpack_self(*args) + # If enabled has been specified, then evaluate it at this point # and if the wrapper is not to be executed, then simply return # the bound function rather than a bound wrapper for the bound @@ -757,230 +782,3 @@ def __init__(self, wrapped, wrapper, enabled=None): super(FunctionWrapper, self).__init__(wrapped, None, wrapper, enabled, binding) - -try: - if not os.environ.get('WRAPT_DISABLE_EXTENSIONS'): - from ._wrappers import (ObjectProxy, CallableObjectProxy, - PartialCallableObjectProxy, FunctionWrapper, - BoundFunctionWrapper, _FunctionWrapperBase) -except ImportError: - pass - -# Helper functions for applying wrappers to existing functions. - -def resolve_path(module, name): - if isinstance(module, string_types): - __import__(module) - module = sys.modules[module] - - parent = module - - path = name.split('.') - attribute = path[0] - - # We can't just always use getattr() because in doing - # that on a class it will cause binding to occur which - # will complicate things later and cause some things not - # to work. For the case of a class we therefore access - # the __dict__ directly. To cope though with the wrong - # class being given to us, or a method being moved into - # a base class, we need to walk the class hierarchy to - # work out exactly which __dict__ the method was defined - # in, as accessing it from __dict__ will fail if it was - # not actually on the class given. Fallback to using - # getattr() if we can't find it. If it truly doesn't - # exist, then that will fail. 
- - def lookup_attribute(parent, attribute): - if inspect.isclass(parent): - for cls in inspect.getmro(parent): - if attribute in vars(cls): - return vars(cls)[attribute] - else: - return getattr(parent, attribute) - else: - return getattr(parent, attribute) - - original = lookup_attribute(parent, attribute) - - for attribute in path[1:]: - parent = original - original = lookup_attribute(parent, attribute) - - return (parent, attribute, original) - -def apply_patch(parent, attribute, replacement): - setattr(parent, attribute, replacement) - -def wrap_object(module, name, factory, args=(), kwargs={}): - (parent, attribute, original) = resolve_path(module, name) - wrapper = factory(original, *args, **kwargs) - apply_patch(parent, attribute, wrapper) - return wrapper - -# Function for applying a proxy object to an attribute of a class -# instance. The wrapper works by defining an attribute of the same name -# on the class which is a descriptor and which intercepts access to the -# instance attribute. Note that this cannot be used on attributes which -# are themselves defined by a property object. 
- -class AttributeWrapper(object): - - def __init__(self, attribute, factory, args, kwargs): - self.attribute = attribute - self.factory = factory - self.args = args - self.kwargs = kwargs - - def __get__(self, instance, owner): - value = instance.__dict__[self.attribute] - return self.factory(value, *self.args, **self.kwargs) - - def __set__(self, instance, value): - instance.__dict__[self.attribute] = value - - def __delete__(self, instance): - del instance.__dict__[self.attribute] - -def wrap_object_attribute(module, name, factory, args=(), kwargs={}): - path, attribute = name.rsplit('.', 1) - parent = resolve_path(module, path)[2] - wrapper = AttributeWrapper(attribute, factory, args, kwargs) - apply_patch(parent, attribute, wrapper) - return wrapper - -# Functions for creating a simple decorator using a FunctionWrapper, -# plus short cut functions for applying wrappers to functions. These are -# for use when doing monkey patching. For a more featured way of -# creating decorators see the decorator decorator instead. 
- -def function_wrapper(wrapper): - def _wrapper(wrapped, instance, args, kwargs): - target_wrapped = args[0] - if instance is None: - target_wrapper = wrapper - elif inspect.isclass(instance): - target_wrapper = wrapper.__get__(None, instance) - else: - target_wrapper = wrapper.__get__(instance, type(instance)) - return FunctionWrapper(target_wrapped, target_wrapper) - return FunctionWrapper(wrapper, _wrapper) - -def wrap_function_wrapper(module, name, wrapper): - return wrap_object(module, name, FunctionWrapper, (wrapper,)) - -def patch_function_wrapper(module, name): - def _wrapper(wrapper): - return wrap_object(module, name, FunctionWrapper, (wrapper,)) - return _wrapper - -def transient_function_wrapper(module, name): - def _decorator(wrapper): - def _wrapper(wrapped, instance, args, kwargs): - target_wrapped = args[0] - if instance is None: - target_wrapper = wrapper - elif inspect.isclass(instance): - target_wrapper = wrapper.__get__(None, instance) - else: - target_wrapper = wrapper.__get__(instance, type(instance)) - def _execute(wrapped, instance, args, kwargs): - (parent, attribute, original) = resolve_path(module, name) - replacement = FunctionWrapper(original, target_wrapper) - setattr(parent, attribute, replacement) - try: - return wrapped(*args, **kwargs) - finally: - setattr(parent, attribute, original) - return FunctionWrapper(target_wrapped, _execute) - return FunctionWrapper(wrapper, _wrapper) - return _decorator - -# A weak function proxy. This will work on instance methods, class -# methods, static methods and regular functions. Special treatment is -# needed for the method types because the bound method is effectively a -# transient object and applying a weak reference to one will immediately -# result in it being destroyed and the weakref callback called. The weak -# reference is therefore applied to the instance the method is bound to -# and the original function. 
The function is then rebound at the point -# of a call via the weak function proxy. - -def _weak_function_proxy_callback(ref, proxy, callback): - if proxy._self_expired: - return - - proxy._self_expired = True - - # This could raise an exception. We let it propagate back and let - # the weakref.proxy() deal with it, at which point it generally - # prints out a short error message direct to stderr and keeps going. - - if callback is not None: - callback(proxy) - -class WeakFunctionProxy(ObjectProxy): - - __slots__ = ('_self_expired', '_self_instance') - - def __init__(self, wrapped, callback=None): - # We need to determine if the wrapped function is actually a - # bound method. In the case of a bound method, we need to keep a - # reference to the original unbound function and the instance. - # This is necessary because if we hold a reference to the bound - # function, it will be the only reference and given it is a - # temporary object, it will almost immediately expire and - # the weakref callback triggered. So what is done is that we - # hold a reference to the instance and unbound function and - # when called bind the function to the instance once again and - # then call it. Note that we avoid using a nested function for - # the callback here so as not to cause any odd reference cycles. 
- - _callback = callback and functools.partial( - _weak_function_proxy_callback, proxy=self, - callback=callback) - - self._self_expired = False - - if isinstance(wrapped, _FunctionWrapperBase): - self._self_instance = weakref.ref(wrapped._self_instance, - _callback) - - if wrapped._self_parent is not None: - super(WeakFunctionProxy, self).__init__( - weakref.proxy(wrapped._self_parent, _callback)) - - else: - super(WeakFunctionProxy, self).__init__( - weakref.proxy(wrapped, _callback)) - - return - - try: - self._self_instance = weakref.ref(wrapped.__self__, _callback) - - super(WeakFunctionProxy, self).__init__( - weakref.proxy(wrapped.__func__, _callback)) - - except AttributeError: - self._self_instance = None - - super(WeakFunctionProxy, self).__init__( - weakref.proxy(wrapped, _callback)) - - def __call__(self, *args, **kwargs): - # We perform a boolean check here on the instance and wrapped - # function as that will trigger the reference error prior to - # calling if the reference had expired. - - instance = self._self_instance and self._self_instance() - function = self.__wrapped__ and self.__wrapped__ - - # If the wrapped function was originally a bound function, for - # which we retained a reference to the instance and the unbound - # function we need to rebind the function and then call it. If - # not just called the wrapped function. 
- - if instance is None: - return self.__wrapped__(*args, **kwargs) - - return function.__get__(instance, type(instance))(*args, **kwargs) diff --git a/setup.py b/setup.py index b351ae06d..8ab4a2d6b 100644 --- a/setup.py +++ b/setup.py @@ -124,6 +124,7 @@ def build_extension(self, ext): "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: System :: Monitoring", diff --git a/tests/adapter_hypercorn/test_hypercorn.py b/tests/adapter_hypercorn/test_hypercorn.py index 8b53eee0a..262f7a031 100644 --- a/tests/adapter_hypercorn/test_hypercorn.py +++ b/tests/adapter_hypercorn/test_hypercorn.py @@ -17,7 +17,6 @@ import time from urllib.request import HTTPError, urlopen -import pkg_resources import pytest from testing_support.fixtures import ( override_application_settings, @@ -39,8 +38,12 @@ from newrelic.api.transaction import ignore_transaction from newrelic.common.object_names import callable_name +from newrelic.common.package_version_utils import ( + get_package_version, + get_package_version_tuple, +) -HYPERCORN_VERSION = tuple(int(v) for v in pkg_resources.get_distribution("hypercorn").version.split(".")) +HYPERCORN_VERSION = get_package_version_tuple("hypercorn") asgi_2_unsupported = HYPERCORN_VERSION >= (0, 14, 1) wsgi_unsupported = HYPERCORN_VERSION < (0, 14, 1) @@ -60,6 +63,7 @@ def wsgi_app(environ, start_response): @pytest.fixture( + scope="session", params=( pytest.param( simple_app_v2_raw, @@ -78,7 +82,7 @@ def app(request): return request.param -@pytest.fixture() +@pytest.fixture(scope="session") def port(loop, app): import hypercorn.asyncio import hypercorn.config @@ -132,7 +136,7 @@ def wait_for_port(port, retries=10): @override_application_settings({"transaction_name.naming_scheme": "framework"}) def 
test_hypercorn_200(port, app): - hypercorn_version = pkg_resources.get_distribution("hypercorn").version + hypercorn_version = get_package_version("hypercorn") @validate_transaction_metrics( callable_name(app), diff --git a/tests/agent_features/test_configuration.py b/tests/agent_features/test_configuration.py index 1a311e693..a75e30f58 100644 --- a/tests/agent_features/test_configuration.py +++ b/tests/agent_features/test_configuration.py @@ -44,6 +44,7 @@ global_settings_dump, ) +from testing_support.fixtures import override_generic_settings def function_to_trace(): pass @@ -595,6 +596,7 @@ def test_translate_deprecated_ignored_params_with_new_setting(): ("otlp_port", 0), ), ) +@override_generic_settings(global_settings(), {"host": "collector.newrelic.com"}) def test_default_values(name, expected_value): settings = global_settings() value = fetch_config_setting(settings, name) diff --git a/tests/agent_features/test_error_events.py b/tests/agent_features/test_error_events.py index 72bdb14f7..2e648271d 100644 --- a/tests/agent_features/test_error_events.py +++ b/tests/agent_features/test_error_events.py @@ -20,7 +20,7 @@ from testing_support.fixtures import ( cat_enabled, make_cross_agent_headers, - make_synthetics_header, + make_synthetics_headers, override_application_settings, reset_core_stats_engine, validate_error_event_sample_data, @@ -43,6 +43,9 @@ SYNTHETICS_RESOURCE_ID = "09845779-16ef-4fa7-b7f2-44da8e62931c" SYNTHETICS_JOB_ID = "8c7dd3ba-4933-4cbb-b1ed-b62f511782f4" SYNTHETICS_MONITOR_ID = "dc452ae9-1a93-4ab5-8a33-600521e9cd00" +SYNTHETICS_TYPE = "scheduled" +SYNTHETICS_INITIATOR = "graphql" +SYNTHETICS_ATTRIBUTES = {"exampleAttribute": "1"} ERR_MESSAGE = "Transaction had bad value" ERROR = ValueError(ERR_MESSAGE) @@ -135,6 +138,9 @@ def test_transaction_error_cross_agent(): "nr.syntheticsResourceId": SYNTHETICS_RESOURCE_ID, "nr.syntheticsJobId": SYNTHETICS_JOB_ID, "nr.syntheticsMonitorId": SYNTHETICS_MONITOR_ID, + "nr.syntheticsType": SYNTHETICS_TYPE, + 
"nr.syntheticsInitiator": SYNTHETICS_INITIATOR, + "nr.syntheticsExampleAttribute": "1", } @@ -144,12 +150,15 @@ def test_transaction_error_with_synthetics(): "err_message": ERR_MESSAGE, } settings = application_settings() - headers = make_synthetics_header( + headers = make_synthetics_headers( + settings.encoding_key, settings.trusted_account_ids[0], SYNTHETICS_RESOURCE_ID, SYNTHETICS_JOB_ID, SYNTHETICS_MONITOR_ID, - settings.encoding_key, + SYNTHETICS_TYPE, + SYNTHETICS_INITIATOR, + SYNTHETICS_ATTRIBUTES, ) response = fully_featured_application.get("/", headers=headers, extra_environ=test_environ) diff --git a/tests/agent_features/test_lambda_handler.py b/tests/agent_features/test_lambda_handler.py index 40b694407..69b05fbf8 100644 --- a/tests/agent_features/test_lambda_handler.py +++ b/tests/agent_features/test_lambda_handler.py @@ -100,6 +100,8 @@ class Context(object): memory_limit_in_mb = 128 +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @pytest.mark.parametrize("is_cold", (False, True)) def test_lambda_transaction_attributes(is_cold, monkeypatch): # setup copies of the attribute lists for this test only @@ -139,6 +141,8 @@ def _test(): _test() +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @validate_transaction_trace_attributes(_expected_attributes) @validate_transaction_event_attributes(_expected_attributes) @override_application_settings(_override_settings) @@ -193,6 +197,8 @@ def test_lambda_malformed_request_headers(): } +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @validate_transaction_trace_attributes(_malformed_response_attributes) @validate_transaction_event_attributes(_malformed_response_attributes) @override_application_settings(_override_settings) @@ -229,6 +235,8 @@ def handler(event, context): } +# The lambda_hander has 
been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @validate_transaction_trace_attributes(_no_status_code_response) @validate_transaction_event_attributes(_no_status_code_response) @override_application_settings(_override_settings) @@ -253,6 +261,8 @@ def handler(event, context): ) +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @pytest.mark.parametrize("event,arn", ((empty_event, None), (firehose_event, "arn:aws:kinesis:EXAMPLE"))) def test_lambda_event_source_arn_attribute(event, arn): if arn is None: @@ -285,6 +295,8 @@ def _test(): _test() +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @pytest.mark.parametrize( "api", ( diff --git a/tests/agent_features/test_ml_events.py b/tests/agent_features/test_ml_events.py index 5720224bb..60797375d 100644 --- a/tests/agent_features/test_ml_events.py +++ b/tests/agent_features/test_ml_events.py @@ -15,7 +15,7 @@ import time import pytest -from testing_support.fixtures import ( # function_not_called,; override_application_settings, +from testing_support.fixtures import ( function_not_called, override_application_settings, reset_core_stats_engine, @@ -151,6 +151,7 @@ def test_record_ml_event_outside_transaction_params_not_a_dict(): # Tests for ML Events configuration settings + @override_application_settings({"ml_insights_events.enabled": False}) @reset_core_stats_engine() @validate_ml_event_count(count=0) diff --git a/tests/agent_features/test_serverless_mode.py b/tests/agent_features/test_serverless_mode.py index 189481f70..6114102bf 100644 --- a/tests/agent_features/test_serverless_mode.py +++ b/tests/agent_features/test_serverless_mode.py @@ -151,6 +151,8 @@ def _test_inbound_dt_payload_acceptance(): _test_inbound_dt_payload_acceptance() +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The 
lambda_handler has been deprecated") @pytest.mark.parametrize("arn_set", (True, False)) def test_payload_metadata_arn(serverless_application, arn_set): # If the session object gathers the arn from the settings object before the diff --git a/tests/agent_features/test_synthetics.py b/tests/agent_features/test_synthetics.py index 2e08144cc..350cab03f 100644 --- a/tests/agent_features/test_synthetics.py +++ b/tests/agent_features/test_synthetics.py @@ -17,7 +17,7 @@ from testing_support.external_fixtures import validate_synthetics_external_trace_header from testing_support.fixtures import ( cat_enabled, - make_synthetics_header, + make_synthetics_headers, override_application_settings, ) from testing_support.validators.validate_synthetics_event import ( @@ -37,6 +37,9 @@ SYNTHETICS_RESOURCE_ID = "09845779-16ef-4fa7-b7f2-44da8e62931c" SYNTHETICS_JOB_ID = "8c7dd3ba-4933-4cbb-b1ed-b62f511782f4" SYNTHETICS_MONITOR_ID = "dc452ae9-1a93-4ab5-8a33-600521e9cd00" +SYNTHETICS_TYPE = "scheduled" +SYNTHETICS_INITIATOR = "graphql" +SYNTHETICS_ATTRIBUTES = {"exampleAttribute": "1"} _override_settings = { "encoding_key": ENCODING_KEY, @@ -45,15 +48,19 @@ } -def _make_synthetics_header( +def _make_synthetics_headers( version="1", account_id=ACCOUNT_ID, resource_id=SYNTHETICS_RESOURCE_ID, job_id=SYNTHETICS_JOB_ID, monitor_id=SYNTHETICS_MONITOR_ID, encoding_key=ENCODING_KEY, + info_version="1", + type_=SYNTHETICS_TYPE, + initiator=SYNTHETICS_INITIATOR, + attributes=SYNTHETICS_ATTRIBUTES, ): - return make_synthetics_header(account_id, resource_id, job_id, monitor_id, encoding_key, version) + return make_synthetics_headers(encoding_key, account_id, resource_id, job_id, monitor_id, type_, initiator, attributes, synthetics_version=version, synthetics_info_version=info_version) def decode_header(header, encoding_key=ENCODING_KEY): @@ -80,6 +87,9 @@ def target_wsgi_application(environ, start_response): ("nr.syntheticsResourceId", SYNTHETICS_RESOURCE_ID), ("nr.syntheticsJobId", 
SYNTHETICS_JOB_ID), ("nr.syntheticsMonitorId", SYNTHETICS_MONITOR_ID), + ("nr.syntheticsType", SYNTHETICS_TYPE), + ("nr.syntheticsInitiator", SYNTHETICS_INITIATOR), + ("nr.syntheticsExampleAttribute", "1"), ] _test_valid_synthetics_event_forgone = [] @@ -89,21 +99,51 @@ def target_wsgi_application(environ, start_response): ) @override_application_settings(_override_settings) def test_valid_synthetics_event(): - headers = _make_synthetics_header() + headers = _make_synthetics_headers() + response = target_application.get("/", headers=headers) + + +_test_valid_synthetics_event_without_info_required = [ + ("nr.syntheticsResourceId", SYNTHETICS_RESOURCE_ID), + ("nr.syntheticsJobId", SYNTHETICS_JOB_ID), + ("nr.syntheticsMonitorId", SYNTHETICS_MONITOR_ID), +] +_test_valid_synthetics_event_without_info_forgone = [ + "nr.syntheticsType", + "nr.syntheticsInitiator", + "nr.syntheticsExampleAttribute", +] + + +@validate_synthetics_event( + _test_valid_synthetics_event_without_info_required, _test_valid_synthetics_event_without_info_forgone, should_exist=True +) +@override_application_settings(_override_settings) +def test_valid_synthetics_event_without_info(): + headers = _make_synthetics_headers(type_=None, initiator=None, attributes=None) response = target_application.get("/", headers=headers) @validate_synthetics_event([], [], should_exist=False) @override_application_settings(_override_settings) def test_no_synthetics_event_unsupported_version(): - headers = _make_synthetics_header(version="0") + headers = _make_synthetics_headers(version="0") + response = target_application.get("/", headers=headers) + + +@validate_synthetics_event( + _test_valid_synthetics_event_without_info_required, _test_valid_synthetics_event_without_info_forgone, should_exist=True +) +@override_application_settings(_override_settings) +def test_synthetics_event_unsupported_info_version(): + headers = _make_synthetics_headers(info_version="0") response = target_application.get("/", headers=headers) 
@validate_synthetics_event([], [], should_exist=False) @override_application_settings(_override_settings) def test_no_synthetics_event_untrusted_account(): - headers = _make_synthetics_header(account_id="999") + headers = _make_synthetics_headers(account_id="999") response = target_application.get("/", headers=headers) @@ -111,7 +151,20 @@ def test_no_synthetics_event_untrusted_account(): @override_application_settings(_override_settings) def test_no_synthetics_event_mismatched_encoding_key(): encoding_key = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz" - headers = _make_synthetics_header(encoding_key=encoding_key) + headers = _make_synthetics_headers(encoding_key=encoding_key) + response = target_application.get("/", headers=headers) + + +@validate_synthetics_event( + _test_valid_synthetics_event_without_info_required, _test_valid_synthetics_event_without_info_forgone, should_exist=True +) +@override_application_settings(_override_settings) +def test_synthetics_event_mismatched_info_encoding_key(): + encoding_key = "zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz" + headers = { + "X-NewRelic-Synthetics": _make_synthetics_headers(type_=None)["X-NewRelic-Synthetics"], + "X-NewRelic-Synthetics-Info": _make_synthetics_headers(encoding_key=encoding_key)["X-NewRelic-Synthetics-Info"], + } response = target_application.get("/", headers=headers) @@ -119,6 +172,9 @@ def test_no_synthetics_event_mismatched_encoding_key(): "synthetics_resource_id": SYNTHETICS_RESOURCE_ID, "synthetics_job_id": SYNTHETICS_JOB_ID, "synthetics_monitor_id": SYNTHETICS_MONITOR_ID, + "synthetics_type": SYNTHETICS_TYPE, + "synthetics_initiator": SYNTHETICS_INITIATOR, + "synthetics_example_attribute": "1", } @@ -126,7 +182,7 @@ def test_no_synthetics_event_mismatched_encoding_key(): @validate_synthetics_transaction_trace(_test_valid_synthetics_tt_required) @override_application_settings(_override_settings) def test_valid_synthetics_in_transaction_trace(): - headers = _make_synthetics_header() + headers = 
_make_synthetics_headers() response = target_application.get("/", headers=headers) @@ -146,26 +202,36 @@ def test_no_synthetics_in_transaction_trace(): @validate_synthetics_event([], [], should_exist=False) @override_application_settings(_disabled_settings) def test_synthetics_disabled(): - headers = _make_synthetics_header() + headers = _make_synthetics_headers() response = target_application.get("/", headers=headers) -_external_synthetics_header = ("X-NewRelic-Synthetics", _make_synthetics_header()["X-NewRelic-Synthetics"]) +_external_synthetics_headers = _make_synthetics_headers() +_external_synthetics_header = _external_synthetics_headers["X-NewRelic-Synthetics"] +_external_synthetics_info_header = _external_synthetics_headers["X-NewRelic-Synthetics-Info"] @cat_enabled -@validate_synthetics_external_trace_header(required_header=_external_synthetics_header, should_exist=True) +@validate_synthetics_external_trace_header(_external_synthetics_header, _external_synthetics_info_header) @override_application_settings(_override_settings) def test_valid_synthetics_external_trace_header(): - headers = _make_synthetics_header() + headers = _make_synthetics_headers() + response = target_application.get("/", headers=headers) + + +@cat_enabled +@validate_synthetics_external_trace_header(_external_synthetics_header, None) +@override_application_settings(_override_settings) +def test_valid_synthetics_external_trace_header_without_info(): + headers = _make_synthetics_headers(type_=None) response = target_application.get("/", headers=headers) @cat_enabled -@validate_synthetics_external_trace_header(required_header=_external_synthetics_header, should_exist=True) +@validate_synthetics_external_trace_header(_external_synthetics_header, _external_synthetics_info_header) @override_application_settings(_override_settings) def test_valid_external_trace_header_with_byte_inbound_header(): - headers = _make_synthetics_header() + headers = _make_synthetics_headers() headers = 
{k.encode("utf-8"): v.encode("utf-8") for k, v in headers.items()} @web_transaction( @@ -178,7 +244,7 @@ def webapp(): webapp() -@validate_synthetics_external_trace_header(should_exist=False) +@validate_synthetics_external_trace_header(None, None) @override_application_settings(_override_settings) def test_no_synthetics_external_trace_header(): response = target_application.get("/") @@ -194,7 +260,7 @@ def _synthetics_limit_test(num_requests, num_events, num_transactions): # Send requests - headers = _make_synthetics_header() + headers = _make_synthetics_headers() for i in range(num_requests): response = target_application.get("/", headers=headers) diff --git a/tests/agent_streaming/test_infinite_tracing.py b/tests/agent_streaming/test_infinite_tracing.py index f1119c38c..59060347e 100644 --- a/tests/agent_streaming/test_infinite_tracing.py +++ b/tests/agent_streaming/test_infinite_tracing.py @@ -389,12 +389,12 @@ def _test(): # Wait for OK status code to close the channel start_time = time.time() while not (request_iterator._stream and request_iterator._stream.done()): - assert time.time() - start_time < 5, "Timed out waiting for OK status code." + assert time.time() - start_time < 15, "Timed out waiting for OK status code." time.sleep(0.5) # Put new span and wait until buffer has been emptied and either sent or lost stream_buffer.put(span) - assert spans_processed_event.wait(timeout=5), "Data lost in stream buffer iterator." + assert spans_processed_event.wait(timeout=15), "Data lost in stream buffer iterator." _test() diff --git a/tests/agent_unittests/test_encoding_utils.py b/tests/agent_unittests/test_encoding_utils.py new file mode 100644 index 000000000..397f2fa2e --- /dev/null +++ b/tests/agent_unittests/test_encoding_utils.py @@ -0,0 +1,52 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from newrelic.common.encoding_utils import camel_case, snake_case + + +@pytest.mark.parametrize("input_,expected,upper", [ + ("", "", False), + ("", "", True), + ("my_string", "myString", False), + ("my_string", "MyString", True), + ("LeaveCase", "LeaveCase", False), + ("correctCase", "CorrectCase", True), + ("UPPERcaseLETTERS", "UPPERcaseLETTERS", False), + ("UPPERcaseLETTERS", "UPPERcaseLETTERS", True), + ("lowerCASEletters", "lowerCASEletters", False), + ("lowerCASEletters", "LowerCASEletters", True), + ("very_long_snake_string", "VeryLongSnakeString", True), + ("kebab-case", "kebab-case", False), +]) +def test_camel_case(input_, expected, upper): + output = camel_case(input_, upper=upper) + assert output == expected + + +@pytest.mark.parametrize("input_,expected", [ + ("", ""), + ("", ""), + ("my_string", "my_string"), + ("myString", "my_string"), + ("MyString", "my_string"), + ("UPPERcaseLETTERS", "uppercase_letters"), + ("lowerCASEletters", "lower_caseletters"), + ("VeryLongCamelString", "very_long_camel_string"), + ("kebab-case", "kebab-case"), +]) +def test_snake_case(input_, expected): + output = snake_case(input_) + assert output == expected diff --git a/tests/agent_unittests/test_environment.py b/tests/agent_unittests/test_environment.py index b2c639adc..84dd753a9 100644 --- a/tests/agent_unittests/test_environment.py +++ b/tests/agent_unittests/test_environment.py @@ -15,9 +15,13 @@ import sys import pytest +from testing_support.fixtures import override_generic_settings +from newrelic.core.config import global_settings from 
newrelic.core.environment import environment_settings +settings = global_settings() + def module(version): class Module(object): @@ -47,6 +51,23 @@ def test_plugin_list(): assert "pytest (%s)" % (pytest.__version__) in plugin_list +@override_generic_settings(settings, {"package_reporting.enabled": False}) +def test_plugin_list_when_package_reporting_disabled(): + # Let's pretend we fired an import hook + import newrelic.hooks.adapter_gunicorn # noqa: F401 + + environment_info = environment_settings() + + for key, plugin_list in environment_info: + if key == "Plugin List": + break + else: + assert False, "'Plugin List' not found" + + # Check that bogus plugins don't get reported + assert plugin_list == [] + + class NoIteratorDict(object): def __init__(self, d): self.d = d diff --git a/tests/agent_unittests/test_harvest_loop.py b/tests/agent_unittests/test_harvest_loop.py index 15b67a81e..a3eaf7b5f 100644 --- a/tests/agent_unittests/test_harvest_loop.py +++ b/tests/agent_unittests/test_harvest_loop.py @@ -143,6 +143,10 @@ def transaction_node(request): synthetics_job_id=None, synthetics_monitor_id=None, synthetics_header=None, + synthetics_type=None, + synthetics_initiator=None, + synthetics_attributes=None, + synthetics_info_header=None, is_part_of_cat=False, trip_id="4485b89db608aece", path_hash=None, diff --git a/tests/agent_unittests/test_wrappers.py b/tests/agent_unittests/test_wrappers.py new file mode 100644 index 000000000..eccee4df5 --- /dev/null +++ b/tests/agent_unittests/test_wrappers.py @@ -0,0 +1,81 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from newrelic.common.object_wrapper import function_wrapper + + +@pytest.fixture(scope="function") +def wrapper(): + @function_wrapper + def _wrapper(wrapped, instance, args, kwargs): + return wrapped(*args, **kwargs) + + return _wrapper + + +@pytest.fixture(scope="function") +def wrapped_function(wrapper): + @wrapper + def wrapped(): + return True + + return wrapped + + +def test_nr_prefix_attributes(wrapped_function): + wrapped_function._nr_attr = 1 + vars_ = vars(wrapped_function) + + assert wrapped_function._nr_attr == 1, "_nr_ attributes should be stored on wrapper object and retrievable." + assert "_nr_attr" not in vars_, "_nr_ attributes should NOT appear in __dict__." + + +def test_self_prefix_attributes(wrapped_function): + wrapped_function._self_attr = 1 + vars_ = vars(wrapped_function) + + assert wrapped_function._self_attr == 1, "_self_ attributes should be stored on wrapper object and retrievable." + assert "_nr_attr" not in vars_, "_self_ attributes should NOT appear in __dict__." + + +def test_prefixed_attributes_share_namespace(wrapped_function): + wrapped_function._nr_attr = 1 + wrapped_function._self_attr = 2 + + assert ( + wrapped_function._nr_attr == 2 + ), "_nr_ attributes share a namespace with _self_ attributes and should be overwritten." + + +def test_wrapped_function_attributes(wrapped_function): + wrapped_function._other_attr = 1 + vars_ = vars(wrapped_function) + + assert wrapped_function._other_attr == 1, "All other attributes should be stored on wrapped object and retrievable." 
+    assert "_other_attr" in vars_, "Other types of attributes SHOULD appear in __dict__." + + assert wrapped_function() + + +def test_multiple_wrapper_last_object(wrapper): + def wrapped(): + pass + + wrapper_1 = wrapper(wrapped) + wrapper_2 = wrapper(wrapper_1) + + assert wrapper_2._nr_last_object is wrapped, "Last object in chain should be the wrapped function." + assert wrapper_2._nr_next_object is wrapper_1, "Next object in chain should be the middle function." diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/README.md b/tests/cross_agent/fixtures/docker_container_id_v2/README.md new file mode 100644 index 000000000..ea6cc2503 --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/README.md @@ -0,0 +1,6 @@ +These tests cover parsing of Docker container IDs on Linux hosts out of +`/proc/self/mountinfo` (or `/proc/<pid>/mountinfo` more generally). + +The `cases.json` file lists each filename in this directory containing +example `/proc/self/mountinfo` content, and the expected Docker container ID that +should be parsed from that file.
diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/cases.json b/tests/cross_agent/fixtures/docker_container_id_v2/cases.json new file mode 100644 index 000000000..83d6360a3 --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/cases.json @@ -0,0 +1,36 @@ +[ + { + "filename": "docker-20.10.16.txt", + "containerId": "84cf3472a20d1bfb4b50e48b6ff50d96dfcd812652d76dd907951e6f98997bce", + "expectedMetrics": null + }, + { + "filename": "docker-24.0.2.txt", + "containerId": "b0a24eed1b031271d8ba0784b8f354b3da892dfd08bbcf14dd7e8a1cf9292f65", + "expectedMetrics": null + }, + { + "filename": "empty.txt", + "containerId": null, + "expectedMetrics": null + }, + { + "filename": "invalid-characters.txt", + "containerId": null, + "expectedMetrics": null + }, + { + "filename": "docker-too-long.txt", + "containerId": null, + "expectedMetrics": null + }, + { + "filename": "invalid-length.txt", + "containerId": null, + "expectedMetrics": { + "Supportability/utilization/docker/error": { + "callCount": 1 + } + } + } +] diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/docker-20.10.16.txt b/tests/cross_agent/fixtures/docker_container_id_v2/docker-20.10.16.txt new file mode 100644 index 000000000..ce2b1bedf --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/docker-20.10.16.txt @@ -0,0 +1,24 @@ +519 413 0:152 / / rw,relatime master:180 - overlay overlay 
rw,lowerdir=/var/lib/docker/overlay2/l/YCID3333O5VYPYDNTQRZX4GI67:/var/lib/docker/overlay2/l/G7H4TULAFM2UBFRL7QFQPUNXY5:/var/lib/docker/overlay2/l/RLC4GCL75VGXXXYJJO57STHIYN:/var/lib/docker/overlay2/l/YOZKNWFAP6YX74XEKPHX4KG4UN:/var/lib/docker/overlay2/l/46EQ6YX5PQQZ4Z3WCSMQ6Z4YWI:/var/lib/docker/overlay2/l/KGKX3Z5ZMOCDWOFKBS2FSHMQMQ:/var/lib/docker/overlay2/l/CKFYAF4TXZD4RCE6RG6UNL5WVI,upperdir=/var/lib/docker/overlay2/358c429f7b04ee5a228b94efaebe3413a98fcc676b726f078fe875727e3bddd2/diff,workdir=/var/lib/docker/overlay2/358c429f7b04ee5a228b94efaebe3413a98fcc676b726f078fe875727e3bddd2/work +520 519 0:155 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +521 519 0:156 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +522 521 0:157 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +523 519 0:158 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +524 523 0:30 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw +525 521 0:154 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +526 521 0:159 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k +527 519 254:1 /docker/volumes/3237dea4f8022f1addd7b6f072a9c847eb3e5b8df0d599f462ba7040884d4618/_data /data rw,relatime master:28 - ext4 /dev/vda1 rw +528 519 254:1 /docker/containers/84cf3472a20d1bfb4b50e48b6ff50d96dfcd812652d76dd907951e6f98997bce/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/vda1 rw +529 519 254:1 /docker/containers/84cf3472a20d1bfb4b50e48b6ff50d96dfcd812652d76dd907951e6f98997bce/hostname /etc/hostname rw,relatime - ext4 /dev/vda1 rw +530 519 254:1 /docker/containers/84cf3472a20d1bfb4b50e48b6ff50d96dfcd812652d76dd907951e6f98997bce/hosts /etc/hosts rw,relatime - ext4 /dev/vda1 rw +414 521 0:157 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +415 520 0:155 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +416 520 0:155 /fs /proc/fs 
ro,nosuid,nodev,noexec,relatime - proc proc rw +417 520 0:155 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +418 520 0:155 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +419 520 0:155 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +420 520 0:160 / /proc/acpi ro,relatime - tmpfs tmpfs ro +421 520 0:156 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +422 520 0:156 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +423 520 0:156 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +424 520 0:156 /null /proc/sched_debug rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +425 523 0:161 / /sys/firmware ro,relatime - tmpfs tmpfs ro diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/docker-24.0.2.txt b/tests/cross_agent/fixtures/docker_container_id_v2/docker-24.0.2.txt new file mode 100644 index 000000000..1725e7726 --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/docker-24.0.2.txt @@ -0,0 +1,21 @@ +1014 1013 0:269 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +1019 1013 0:270 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +1020 1019 0:271 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +1021 1013 0:272 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +1022 1021 0:30 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw +1023 1019 0:268 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +1024 1019 0:273 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k +1025 1013 254:1 /docker/containers/b0a24eed1b031271d8ba0784b8f354b3da892dfd08bbcf14dd7e8a1cf9292f65/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/vda1 rw,discard +1026 1013 254:1 /docker/containers/b0a24eed1b031271d8ba0784b8f354b3da892dfd08bbcf14dd7e8a1cf9292f65/hostname /etc/hostname rw,relatime - ext4 /dev/vda1 rw,discard +1027 1013 254:1 
/docker/containers/b0a24eed1b031271d8ba0784b8f354b3da892dfd08bbcf14dd7e8a1cf9292f65/hosts /etc/hosts rw,relatime - ext4 /dev/vda1 rw,discard +717 1019 0:271 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +718 1014 0:269 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +719 1014 0:269 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw +720 1014 0:269 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +721 1014 0:269 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +723 1014 0:269 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +726 1014 0:274 / /proc/acpi ro,relatime - tmpfs tmpfs ro +727 1014 0:270 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +728 1014 0:270 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +729 1014 0:270 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +730 1021 0:275 / /sys/firmware ro,relatime - tmpfs tmpfs ro diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/docker-too-long.txt b/tests/cross_agent/fixtures/docker_container_id_v2/docker-too-long.txt new file mode 100644 index 000000000..608eaf7a4 --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/docker-too-long.txt @@ -0,0 +1,21 @@ +1014 1013 0:269 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +1019 1013 0:270 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +1020 1019 0:271 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +1021 1013 0:272 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +1022 1021 0:30 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw +1023 1019 0:268 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +1024 1019 0:273 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k +1025 1013 254:1 
/docker/containers/3ccfa00432798ff38f85839de1e396f771b4acbe9f4ddea0a761c39b9790a7821/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/vda1 rw,discard +1026 1013 254:1 /docker/containers/3ccfa00432798ff38f85839de1e396f771b4acbe9f4ddea0a761c39b9790a7821/hostname /etc/hostname rw,relatime - ext4 /dev/vda1 rw,discard +1027 1013 254:1 /docker/containers/3ccfa00432798ff38f85839de1e396f771b4acbe9f4ddea0a761c39b9790a7821/hosts /etc/hosts rw,relatime - ext4 /dev/vda1 rw,discard +717 1019 0:271 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +718 1014 0:269 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +719 1014 0:269 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw +720 1014 0:269 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +721 1014 0:269 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +723 1014 0:269 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +726 1014 0:274 / /proc/acpi ro,relatime - tmpfs tmpfs ro +727 1014 0:270 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +728 1014 0:270 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +729 1014 0:270 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +730 1021 0:275 / /sys/firmware ro,relatime - tmpfs tmpfs ro diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/empty.txt b/tests/cross_agent/fixtures/docker_container_id_v2/empty.txt new file mode 100644 index 000000000..e69de29bb diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/invalid-characters.txt b/tests/cross_agent/fixtures/docker_container_id_v2/invalid-characters.txt new file mode 100644 index 000000000..b561475ac --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/invalid-characters.txt @@ -0,0 +1,21 @@ +1014 1013 0:269 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +1019 1013 0:270 / /dev rw,nosuid - tmpfs tmpfs 
rw,size=65536k,mode=755 +1020 1019 0:271 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +1021 1013 0:272 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +1022 1021 0:30 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw +1023 1019 0:268 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +1024 1019 0:273 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k +1025 1013 254:1 /docker/containers/WRONGINCORRECTINVALIDCHARSERRONEOUSBADPHONYBROKEN2TERRIBLENOPE55/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/vda1 rw,discard +1026 1013 254:1 /docker/containers/WRONGINCORRECTINVALIDCHARSERRONEOUSBADPHONYBROKEN2TERRIBLENOPE55/hostname /etc/hostname rw,relatime - ext4 /dev/vda1 rw,discard +1027 1013 254:1 /docker/containers/WRONGINCORRECTINVALIDCHARSERRONEOUSBADPHONYBROKEN2TERRIBLENOPE55/hosts /etc/hosts rw,relatime - ext4 /dev/vda1 rw,discard +717 1019 0:271 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +718 1014 0:269 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +719 1014 0:269 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw +720 1014 0:269 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +721 1014 0:269 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +723 1014 0:269 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +726 1014 0:274 / /proc/acpi ro,relatime - tmpfs tmpfs ro +727 1014 0:270 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +728 1014 0:270 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +729 1014 0:270 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +730 1021 0:275 / /sys/firmware ro,relatime - tmpfs tmpfs ro diff --git a/tests/cross_agent/fixtures/docker_container_id_v2/invalid-length.txt b/tests/cross_agent/fixtures/docker_container_id_v2/invalid-length.txt new file mode 100644 
index 000000000..a8987df70 --- /dev/null +++ b/tests/cross_agent/fixtures/docker_container_id_v2/invalid-length.txt @@ -0,0 +1,21 @@ +1014 1013 0:269 / /proc rw,nosuid,nodev,noexec,relatime - proc proc rw +1019 1013 0:270 / /dev rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +1020 1019 0:271 / /dev/pts rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +1021 1013 0:272 / /sys ro,nosuid,nodev,noexec,relatime - sysfs sysfs ro +1022 1021 0:30 / /sys/fs/cgroup ro,nosuid,nodev,noexec,relatime - cgroup2 cgroup rw +1023 1019 0:268 / /dev/mqueue rw,nosuid,nodev,noexec,relatime - mqueue mqueue rw +1024 1019 0:273 / /dev/shm rw,nosuid,nodev,noexec,relatime - tmpfs shm rw,size=65536k +1025 1013 254:1 /docker/containers/47cbd16b77c5/resolv.conf /etc/resolv.conf rw,relatime - ext4 /dev/vda1 rw,discard +1026 1013 254:1 /docker/containers/47cbd16b77c5/hostname /etc/hostname rw,relatime - ext4 /dev/vda1 rw,discard +1027 1013 254:1 /docker/containers/47cbd16b77c5/hosts /etc/hosts rw,relatime - ext4 /dev/vda1 rw,discard +717 1019 0:271 /0 /dev/console rw,nosuid,noexec,relatime - devpts devpts rw,gid=5,mode=620,ptmxmode=666 +718 1014 0:269 /bus /proc/bus ro,nosuid,nodev,noexec,relatime - proc proc rw +719 1014 0:269 /fs /proc/fs ro,nosuid,nodev,noexec,relatime - proc proc rw +720 1014 0:269 /irq /proc/irq ro,nosuid,nodev,noexec,relatime - proc proc rw +721 1014 0:269 /sys /proc/sys ro,nosuid,nodev,noexec,relatime - proc proc rw +723 1014 0:269 /sysrq-trigger /proc/sysrq-trigger ro,nosuid,nodev,noexec,relatime - proc proc rw +726 1014 0:274 / /proc/acpi ro,relatime - tmpfs tmpfs ro +727 1014 0:270 /null /proc/kcore rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +728 1014 0:270 /null /proc/keys rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +729 1014 0:270 /null /proc/timer_list rw,nosuid - tmpfs tmpfs rw,size=65536k,mode=755 +730 1021 0:275 / /sys/firmware ro,relatime - tmpfs tmpfs ro diff --git a/tests/cross_agent/test_docker.py 
b/tests/cross_agent/test_docker_container_id.py similarity index 71% rename from tests/cross_agent/test_docker.py rename to tests/cross_agent/test_docker_container_id.py index fd919932b..f14e80fcd 100644 --- a/tests/cross_agent/test_docker.py +++ b/tests/cross_agent/test_docker_container_id.py @@ -39,13 +39,23 @@ def _load_docker_test_attributes(): return docker_test_attributes +def mock_open(mock_file): + def _mock_open(filename, mode): + if filename == "/proc/self/mountinfo": + raise FileNotFoundError() + elif filename == "/proc/self/cgroup": + return mock_file + raise RuntimeError() + return _mock_open + + @pytest.mark.parametrize('filename, containerId', _load_docker_test_attributes()) -def test_docker_container_id(filename, containerId): +def test_docker_container_id_v1(monkeypatch, filename, containerId): path = os.path.join(DOCKER_FIXTURE, filename) with open(path, 'rb') as f: - with mock.patch.object(u, 'open', create=True, return_value=f): - if containerId is not None: - assert u.DockerUtilization.detect() == {'id': containerId} - else: - assert u.DockerUtilization.detect() is None + monkeypatch.setattr(u, "open", mock_open(f), raising=False) + if containerId is not None: + assert u.DockerUtilization.detect() == {'id': containerId} + else: + assert u.DockerUtilization.detect() is None diff --git a/tests/cross_agent/test_docker_container_id_v2.py b/tests/cross_agent/test_docker_container_id_v2.py new file mode 100644 index 000000000..eee4e1305 --- /dev/null +++ b/tests/cross_agent/test_docker_container_id_v2.py @@ -0,0 +1,61 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import mock +import os +import pytest + +import newrelic.common.utilization as u + +CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) +DOCKER_FIXTURE = os.path.join(CURRENT_DIR, 'fixtures', 'docker_container_id_v2') + + +def _load_docker_test_attributes(): + """Returns a list of docker test attributes in the form: + [(<filename>, <containerId>), ...] + + """ + docker_test_attributes = [] + test_cases = os.path.join(DOCKER_FIXTURE, 'cases.json') + with open(test_cases, 'r') as fh: + js = fh.read() + json_list = json.loads(js) + for json_record in json_list: + docker_test_attributes.append( + (json_record['filename'], json_record['containerId'])) + return docker_test_attributes + + +def mock_open(mock_file): + def _mock_open(filename, mode): + if filename == "/proc/self/cgroup": + raise FileNotFoundError() + elif filename == "/proc/self/mountinfo": + return mock_file + raise RuntimeError() + return _mock_open + + +@pytest.mark.parametrize('filename, containerId', + _load_docker_test_attributes()) +def test_docker_container_id_v2(monkeypatch, filename, containerId): + path = os.path.join(DOCKER_FIXTURE, filename) + with open(path, 'rb') as f: + monkeypatch.setattr(u, "open", mock_open(f), raising=False) + if containerId is not None: + assert u.DockerUtilization.detect() == {'id': containerId} + else: + assert u.DockerUtilization.detect() is None diff --git a/tests/cross_agent/test_lambda_event_source.py b/tests/cross_agent/test_lambda_event_source.py index 511294cf6..de796a6b0 100644 --- a/tests/cross_agent/test_lambda_event_source.py +++
b/tests/cross_agent/test_lambda_event_source.py @@ -14,27 +14,30 @@ import json import os + import pytest +from testing_support.fixtures import override_application_settings +from testing_support.validators.validate_transaction_event_attributes import ( + validate_transaction_event_attributes, +) from newrelic.api.lambda_handler import lambda_handler -from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) -FIXTURE_DIR = os.path.normpath(os.path.join(CURRENT_DIR, 'fixtures')) -FIXTURE = os.path.join(FIXTURE_DIR, 'lambda_event_source.json') +FIXTURE_DIR = os.path.normpath(os.path.join(CURRENT_DIR, "fixtures")) +FIXTURE = os.path.join(FIXTURE_DIR, "lambda_event_source.json") tests = {} events = {} def _load_tests(): - with open(FIXTURE, 'r') as fh: + with open(FIXTURE, "r") as fh: for test in json.loads(fh.read()): - test_name = test.pop('name') + test_name = test.pop("name") - test_file = test_name + '.json' - path = os.path.join(FIXTURE_DIR, 'lambda_event_source', test_file) - with open(path, 'r') as fh: + test_file = test_name + ".json" + path = os.path.join(FIXTURE_DIR, "lambda_event_source", test_file) + with open(path, "r") as fh: events[test_name] = json.loads(fh.read()) tests[test_name] = test @@ -42,37 +45,39 @@ def _load_tests(): class Context(object): - aws_request_id = 'cookies' - invoked_function_arn = 'arn' - function_name = 'cats' - function_version = '$LATEST' + aws_request_id = "cookies" + invoked_function_arn = "arn" + function_name = "cats" + function_version = "$LATEST" memory_limit_in_mb = 128 @lambda_handler() def handler(event, context): return { - 'statusCode': '200', - 'body': '{}', - 'headers': { - 'Content-Type': 'application/json', - 'Content-Length': 2, + "statusCode": "200", + "body": "{}", + "headers": { + "Content-Type": "application/json", + 
"Content-Length": 2, }, } -@pytest.mark.parametrize('test_name', _load_tests()) +# The lambda_handler has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") +@pytest.mark.parametrize("test_name", _load_tests()) def test_lambda_event_source(test_name): - _exact = {'user': {}, 'intrinsic': {}, 'agent': {}} + _exact = {"user": {}, "intrinsic": {}, "agent": {}} - expected_arn = tests[test_name].get('aws.lambda.eventSource.arn', None) + expected_arn = tests[test_name].get("aws.lambda.eventSource.arn", None) if expected_arn: - _exact['agent']['aws.lambda.eventSource.arn'] = expected_arn + _exact["agent"]["aws.lambda.eventSource.arn"] = expected_arn else: pytest.skip("Nothing to test!") return - @override_application_settings({'attributes.include': ['aws.*']}) + @override_application_settings({"attributes.include": ["aws.*"]}) @validate_transaction_event_attributes({}, exact_attrs=_exact) def _test(): handler(events[test_name], Context) diff --git a/tests/datastore_asyncpg/test_multiple_dbs.py b/tests/datastore_asyncpg/test_multiple_dbs.py index a917a9e83..9d7a3de95 100644 --- a/tests/datastore_asyncpg/test_multiple_dbs.py +++ b/tests/datastore_asyncpg/test_multiple_dbs.py @@ -12,20 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License.
-import asyncio - import asyncpg import pytest from testing_support.db_settings import postgresql_settings from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple DB_MULTIPLE_SETTINGS = postgresql_settings() -ASYNCPG_VERSION = tuple(int(x) for x in getattr(asyncpg, "__version__", "0.0").split(".")[:2]) +ASYNCPG_VERSION = get_package_version_tuple("asyncpg") if ASYNCPG_VERSION < (0, 11): CONNECT_METRICS = [] @@ -100,7 +101,6 @@ async def _exercise_db(): - postgresql1 = DB_MULTIPLE_SETTINGS[0] postgresql2 = DB_MULTIPLE_SETTINGS[1] @@ -145,6 +145,7 @@ async def _exercise_db(): ) @background_task() def test_multiple_databases_enable_instance(event_loop): + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(_exercise_db()) @@ -161,4 +162,5 @@ def test_multiple_databases_enable_instance(event_loop): ) @background_task() def test_multiple_databases_disable_instance(event_loop): + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(_exercise_db()) diff --git a/tests/datastore_asyncpg/test_query.py b/tests/datastore_asyncpg/test_query.py index 838ced61d..6deb7ca9a 100644 --- a/tests/datastore_asyncpg/test_query.py +++ b/tests/datastore_asyncpg/test_query.py @@ -27,12 +27,13 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple DB_SETTINGS = postgresql_settings()[0] PG_PREFIX = "Datastore/operation/Postgres/" -ASYNCPG_VERSION = tuple(int(x) for x in getattr(asyncpg, "__version__", "0.0").split(".")[:2]) +ASYNCPG_VERSION = get_package_version_tuple("asyncpg") if 
ASYNCPG_VERSION < (0, 11): CONNECT_METRICS = () @@ -65,6 +66,7 @@ def conn(event_loop): @background_task(name="test_single") @pytest.mark.parametrize("method", ("execute",)) def test_single(event_loop, method, conn): + assert ASYNCPG_VERSION is not None _method = getattr(conn, method) event_loop.run_until_complete(_method("""SELECT 0""")) @@ -81,6 +83,7 @@ def test_single(event_loop, method, conn): @background_task(name="test_prepared_single") @pytest.mark.parametrize("method", ("fetch", "fetchrow", "fetchval")) def test_prepared_single(event_loop, method, conn): + assert ASYNCPG_VERSION is not None _method = getattr(conn, method) event_loop.run_until_complete(_method("""SELECT 0""")) @@ -93,6 +96,7 @@ def test_prepared_single(event_loop, method, conn): ) @background_task(name="test_prepare") def test_prepare(event_loop, conn): + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(conn.prepare("""SELECT 0""")) @@ -125,6 +129,7 @@ async def amain(): # 2 statements await conn.copy_from_query("""SELECT 0""", output=BytesIO()) + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(amain()) @@ -139,6 +144,7 @@ async def amain(): ) @background_task(name="test_select_many") def test_select_many(event_loop, conn): + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(conn.executemany("""SELECT $1::int""", ((1,), (2,)))) @@ -158,6 +164,7 @@ async def amain(): async with conn.transaction(): await conn.execute("""SELECT 0""") + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(amain()) @@ -181,6 +188,7 @@ async def amain(): await conn.cursor("SELECT 0") + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(amain()) @@ -200,6 +208,7 @@ async def amain(): ) @background_task(name="test_unix_socket_connect") def test_unix_socket_connect(event_loop): + assert ASYNCPG_VERSION is not None with pytest.raises(OSError): 
event_loop.run_until_complete(asyncpg.connect("postgres://?host=/.s.PGSQL.THIS_FILE_BETTER_NOT_EXIST")) @@ -233,4 +242,5 @@ async def amain(): finally: await pool.close() + assert ASYNCPG_VERSION is not None event_loop.run_until_complete(amain()) diff --git a/tests/datastore_mysql/test_database.py b/tests/datastore_mysql/test_database.py index 8f8641903..d14e11a41 100644 --- a/tests/datastore_mysql/test_database.py +++ b/tests/datastore_mysql/test_database.py @@ -23,13 +23,15 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple DB_SETTINGS = mysql_settings() DB_SETTINGS = DB_SETTINGS[0] DB_NAMESPACE = DB_SETTINGS["namespace"] DB_PROCEDURE = "hello_" + DB_NAMESPACE -mysql_version = tuple(int(x) for x in mysql.connector.__version__.split(".")[:3]) +mysql_version = get_package_version_tuple("mysql.connector") + if mysql_version >= (8, 0, 30): _connector_metric_name = "Function/mysql.connector.pooling:connect" else: @@ -71,6 +73,12 @@ ] +@validate_transaction_metrics( + "test_database:test_execute_via_cursor", + scoped_metrics=_test_execute_via_cursor_scoped_metrics, + rollup_metrics=_test_execute_via_cursor_rollup_metrics, + background_task=True, +) @validate_transaction_metrics( "test_database:test_execute_via_cursor", scoped_metrics=_test_execute_via_cursor_scoped_metrics, @@ -80,7 +88,7 @@ @validate_database_trace_inputs(sql_parameters_type=dict) @background_task() def test_execute_via_cursor(table_name): - + assert mysql_version is not None connection = mysql.connector.connect( db=DB_SETTINGS["name"], user=DB_SETTINGS["user"], @@ -97,7 +105,7 @@ def test_execute_via_cursor(table_name): cursor.executemany( """insert into `%s` """ % table_name + """values (%(a)s, %(b)s, %(c)s)""", - [dict(a=1, b=1.0, c="1.0"), dict(a=2, b=2.2, c="2.2"), dict(a=3, b=3.3, c="3.3")], + [{"a": 1, "b": 1.0, "c": "1.0"}, {"a": 2, "b": 2.2, "c": "2.2"}, {"a": 3, "b": 3.3, "c": "3.3"}], ) 
cursor.execute("""select * from %s""" % table_name) @@ -107,7 +115,7 @@ def test_execute_via_cursor(table_name): cursor.execute( """update `%s` """ % table_name + """set a=%(a)s, b=%(b)s, c=%(c)s where a=%(old_a)s""", - dict(a=4, b=4.0, c="4.0", old_a=1), + {"a": 4, "b": 4.0, "c": "4.0", "old_a": 1}, ) cursor.execute("""delete from `%s` where a=2""" % table_name) @@ -173,7 +181,7 @@ def test_execute_via_cursor(table_name): @validate_database_trace_inputs(sql_parameters_type=dict) @background_task() def test_connect_using_alias(table_name): - + assert mysql_version is not None connection = mysql.connector.connect( db=DB_SETTINGS["name"], user=DB_SETTINGS["user"], @@ -190,7 +198,7 @@ def test_connect_using_alias(table_name): cursor.executemany( """insert into `%s` """ % table_name + """values (%(a)s, %(b)s, %(c)s)""", - [dict(a=1, b=1.0, c="1.0"), dict(a=2, b=2.2, c="2.2"), dict(a=3, b=3.3, c="3.3")], + [{"a": 1, "b": 1.0, "c": "1.0"}, {"a": 2, "b": 2.2, "c": "2.2"}, {"a": 3, "b": 3.3, "c": "3.3"}], ) cursor.execute("""select * from %s""" % table_name) @@ -200,7 +208,7 @@ def test_connect_using_alias(table_name): cursor.execute( """update `%s` """ % table_name + """set a=%(a)s, b=%(b)s, c=%(c)s where a=%(old_a)s""", - dict(a=4, b=4.0, c="4.0", old_a=1), + {"a": 4, "b": 4.0, "c": "4.0", "old_a": 1}, ) cursor.execute("""delete from `%s` where a=2""" % table_name) diff --git a/tests/datastore_psycopg2cffi/test_database.py b/tests/datastore_psycopg2cffi/test_database.py index 939c5cabc..0b3ff87d3 100644 --- a/tests/datastore_psycopg2cffi/test_database.py +++ b/tests/datastore_psycopg2cffi/test_database.py @@ -32,6 +32,7 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple DB_SETTINGS = postgresql_settings()[0] @@ -91,7 +92,6 @@ def test_execute_via_cursor(): host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], ) as connection: - cursor = connection.cursor() 
psycopg2cffi.extensions.register_type(psycopg2cffi.extensions.UNICODE) @@ -161,7 +161,6 @@ def test_rollback_on_exception(): host=DB_SETTINGS["host"], port=DB_SETTINGS["port"], ): - raise RuntimeError("error") except RuntimeError: pass @@ -202,11 +201,11 @@ def test_rollback_on_exception(): @validate_transaction_errors(errors=[]) @background_task() def test_async_mode(): - wait = psycopg2cffi.extras.wait_select kwargs = {} - version = tuple(int(_) for _ in psycopg2cffi.__version__.split(".")) + version = get_package_version_tuple("psycopg2cffi") + assert version is not None if version >= (2, 8): kwargs["async_"] = 1 else: diff --git a/tests/external_botocore/test_boto3_iam.py b/tests/external_botocore/test_boto3_iam.py index 3d672f375..1bd05669a 100644 --- a/tests/external_botocore/test_boto3_iam.py +++ b/tests/external_botocore/test_boto3_iam.py @@ -27,8 +27,9 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +MOTO_VERSION = get_package_version_tuple("moto") # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): diff --git a/tests/external_botocore/test_boto3_s3.py b/tests/external_botocore/test_boto3_s3.py index b6299d9f6..00972c25b 100644 --- a/tests/external_botocore/test_boto3_s3.py +++ b/tests/external_botocore/test_boto3_s3.py @@ -25,8 +25,9 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +MOTO_VERSION = get_package_version_tuple("moto") # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): diff --git a/tests/external_botocore/test_boto3_sns.py b/tests/external_botocore/test_boto3_sns.py index 5e6c7c4b4..307aeed84 100644 --- 
a/tests/external_botocore/test_boto3_sns.py +++ b/tests/external_botocore/test_boto3_sns.py @@ -27,8 +27,9 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +MOTO_VERSION = get_package_version_tuple("moto") # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): diff --git a/tests/external_botocore/test_botocore_dynamodb.py b/tests/external_botocore/test_botocore_dynamodb.py index 932fb1743..8c43ed5c4 100644 --- a/tests/external_botocore/test_botocore_dynamodb.py +++ b/tests/external_botocore/test_botocore_dynamodb.py @@ -27,8 +27,9 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +MOTO_VERSION = get_package_version_tuple("moto") # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): diff --git a/tests/external_botocore/test_botocore_ec2.py b/tests/external_botocore/test_botocore_ec2.py index 3cb83e318..e43744f6c 100644 --- a/tests/external_botocore/test_botocore_ec2.py +++ b/tests/external_botocore/test_botocore_ec2.py @@ -27,8 +27,9 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +MOTO_VERSION = get_package_version_tuple("moto") # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): diff --git a/tests/external_botocore/test_botocore_s3.py b/tests/external_botocore/test_botocore_s3.py index ea0c22539..5bd2feab1 100644 --- a/tests/external_botocore/test_botocore_s3.py +++ b/tests/external_botocore/test_botocore_s3.py @@ -25,9 
+25,10 @@ ) from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) -BOTOCORE_VERSION = tuple(int(v) for v in botocore.__version__.split(".")[:3]) +MOTO_VERSION = MOTO_VERSION = get_package_version_tuple("moto") +BOTOCORE_VERSION = get_package_version_tuple("botocore") # patch earlier versions of moto to support py37 diff --git a/tests/external_botocore/test_botocore_sqs.py b/tests/external_botocore/test_botocore_sqs.py index 63f15801b..6a96614e5 100644 --- a/tests/external_botocore/test_botocore_sqs.py +++ b/tests/external_botocore/test_botocore_sqs.py @@ -25,9 +25,10 @@ ) from newrelic.api.background_task import background_task -from newrelic.common.package_version_utils import get_package_version +from newrelic.common.package_version_utils import get_package_version_tuple -MOTO_VERSION = tuple(int(v) for v in moto.__version__.split(".")[:3]) +MOTO_VERSION = get_package_version_tuple("moto") +BOTOCORE_VERSION = get_package_version_tuple("botocore") # patch earlier versions of moto to support py37 if sys.version_info >= (3, 7) and MOTO_VERSION <= (1, 3, 1): @@ -36,8 +37,8 @@ moto.packages.responses.responses.re._pattern_type = re.Pattern url = "sqs.us-east-1.amazonaws.com" -botocore_version = tuple([int(n) for n in get_package_version("botocore").split(".")]) -if botocore_version < (1, 29, 0): + +if BOTOCORE_VERSION < (1, 29, 0): url = "queue.amazonaws.com" AWS_ACCESS_KEY_ID = "AAAAAAAAAAAACCESSKEY" diff --git a/tests/external_requests/test_requests.py b/tests/external_requests/test_requests.py index f6f4506e5..d25d203c0 100644 --- a/tests/external_requests/test_requests.py +++ b/tests/external_requests/test_requests.py @@ -30,13 +30,19 @@ from testing_support.validators.validate_external_node_params import ( validate_external_node_params, ) -from testing_support.validators.validate_transaction_errors import 
validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple def get_requests_version(): - return tuple(map(int, requests.__version__.split(".")[:2])) + return get_package_version_tuple("requests") @pytest.fixture(scope="session") @@ -89,7 +95,7 @@ def test_https_request_get(server, metrics): @background_task(name="test_requests:test_https_request_get") def _test(): try: - requests.get("https://localhost:%d/" % server.port, verify=False) + requests.get("https://localhost:%d/" % server.port, verify=False) # nosec except Exception: pass diff --git a/tests/external_urllib3/test_urllib3.py b/tests/external_urllib3/test_urllib3.py index 68e15d463..92a2e93df 100644 --- a/tests/external_urllib3/test_urllib3.py +++ b/tests/external_urllib3/test_urllib3.py @@ -25,20 +25,22 @@ cache_outgoing_headers, insert_incoming_headers, ) -from testing_support.fixtures import ( - cat_enabled, - override_application_settings, -) -from testing_support.util import version2tuple +from testing_support.fixtures import cat_enabled, override_application_settings from testing_support.validators.validate_cross_process_headers import ( validate_cross_process_headers, ) from testing_support.validators.validate_external_node_params import ( validate_external_node_params, ) -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from 
testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + from newrelic.api.background_task import background_task +from newrelic.common.package_version_utils import get_package_version_tuple @pytest.fixture(scope="session") @@ -185,7 +187,7 @@ def _test(): # HTTPConnection class. Previously the httplib/http.client HTTPConnection class # was used. We test httplib in a different test directory so we skip this test. @pytest.mark.skipif( - version2tuple(urllib3.__version__) < (1, 8), reason="urllib3.connection.HTTPConnection added in 1.8" + get_package_version_tuple("urllib3") < (1, 8), reason="urllib3.connection.HTTPConnection added in 1.8" ) def test_HTTPConnection_port_included(server): scoped = [("External/localhost:%d/urllib3/" % server.port, 1)] diff --git a/tests/framework_pyramid/test_append_slash_app.py b/tests/framework_pyramid/test_append_slash_app.py index f09e14b55..bb865902c 100644 --- a/tests/framework_pyramid/test_append_slash_app.py +++ b/tests/framework_pyramid/test_append_slash_app.py @@ -30,27 +30,21 @@ will have a count of 2. 
""" -import pkg_resources import pytest -import re +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) -from testing_support.fixtures import override_application_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.validators.validate_transaction_errors import validate_transaction_errors - -def _to_int(version_str): - m = re.match(r'\d+', version_str) - return int(m.group(0)) if m else 0 - -def pyramid_version(): - s = pkg_resources.get_distribution('pyramid').version - return tuple([_to_int(c) for c in s.split('.')[:2]]) +from newrelic.common.package_version_utils import get_package_version_tuple # Defining a `pytestmark` at the module level means py.test will apply # this `skipif` conditional to all tests in the module. -pytestmark = pytest.mark.skipif(pyramid_version() < (1, 3), - reason='requires pyramid >= (1, 3)') +pytestmark = pytest.mark.skipif(get_package_version_tuple("pyramid") < (1, 3), reason="requires pyramid >= (1, 3)") + def target_application(): # We need to delay Pyramid application creation because of ordering @@ -62,35 +56,44 @@ def target_application(): # at global scope, so import it from a separate module. 
from _test_append_slash_app import _test_application + return _test_application + _test_append_slash_app_index_scoped_metrics = [ - ('Python/WSGI/Application', 1), - ('Python/WSGI/Response', 1), - ('Python/WSGI/Finalize', 1), - ('Function/pyramid.router:Router.__call__', 1), - ('Function/_test_append_slash_app:home_view', 1)] + ("Python/WSGI/Application", 1), + ("Python/WSGI/Response", 1), + ("Python/WSGI/Finalize", 1), + ("Function/pyramid.router:Router.__call__", 1), + ("Function/_test_append_slash_app:home_view", 1), +] + @validate_transaction_errors(errors=[]) -@validate_transaction_metrics('_test_append_slash_app:home_view', - scoped_metrics=_test_append_slash_app_index_scoped_metrics) +@validate_transaction_metrics( + "_test_append_slash_app:home_view", scoped_metrics=_test_append_slash_app_index_scoped_metrics +) def test_index(): application = target_application() - response = application.get('/') - response.mustcontain('INDEX RESPONSE') + response = application.get("/") + response.mustcontain("INDEX RESPONSE") + _test_not_found_append_slash_scoped_metrics = [ - ('Python/WSGI/Application', 1), - ('Python/WSGI/Response', 1), - ('Python/WSGI/Finalize', 1), - ('Function/pyramid.router:Router.__call__', 1), - ('Function/pyramid.view:AppendSlashNotFoundViewFactory', 1), - ('Function/_test_append_slash_app:not_found', 1)] + ("Python/WSGI/Application", 1), + ("Python/WSGI/Response", 1), + ("Python/WSGI/Finalize", 1), + ("Function/pyramid.router:Router.__call__", 1), + ("Function/pyramid.view:AppendSlashNotFoundViewFactory", 1), + ("Function/_test_append_slash_app:not_found", 1), +] + @validate_transaction_errors(errors=[]) -@validate_transaction_metrics('_test_append_slash_app:not_found', - scoped_metrics=_test_not_found_append_slash_scoped_metrics) +@validate_transaction_metrics( + "_test_append_slash_app:not_found", scoped_metrics=_test_not_found_append_slash_scoped_metrics +) def test_not_found_append_slash(): application = target_application() - response = 
application.get('/foo', status=404) - response.mustcontain('NOT FOUND') + response = application.get("/foo", status=404) + response.mustcontain("NOT FOUND") diff --git a/tests/logger_structlog/conftest.py b/tests/logger_structlog/conftest.py index 05a86d8a7..191d687c9 100644 --- a/tests/logger_structlog/conftest.py +++ b/tests/logger_structlog/conftest.py @@ -14,7 +14,6 @@ import logging import pytest -from structlog import DropEvent, PrintLogger from newrelic.api.time_trace import current_trace from newrelic.api.transaction import current_transaction from testing_support.fixtures import ( @@ -41,21 +40,27 @@ ) -class StructLogCapLog(PrintLogger): - def __init__(self, caplog): - self.caplog = caplog if caplog is not None else [] +@pytest.fixture(scope="session") +def StructLogCapLog(): + from structlog import PrintLogger - def msg(self, event, **kwargs): - self.caplog.append(event) - return + class _StructLogCapLog(PrintLogger): + def __init__(self, caplog=None): + self.caplog = caplog if caplog is not None else [] - log = debug = info = warn = warning = msg - fatal = failure = err = error = critical = exception = msg + def msg(self, event, **kwargs): + self.caplog.append(event) + return - def __repr__(self): - return "" % str(id(self)) + log = debug = info = warn = warning = msg + fatal = failure = err = error = critical = exception = msg - __str__ = __repr__ + def __repr__(self): + return "" % str(id(self)) + + __str__ = __repr__ + + return _StructLogCapLog @pytest.fixture @@ -67,9 +72,13 @@ def _set(): trace = current_trace() if trace: trace.guid = "abcdefgh" + return _set + def drop_event_processor(logger, method_name, event_dict): + from structlog import DropEvent + if method_name == "info": raise DropEvent else: @@ -77,14 +86,15 @@ def drop_event_processor(logger, method_name, event_dict): @pytest.fixture(scope="function") -def structlog_caplog(): - return list() +def structlog_caplog(StructLogCapLog): + return StructLogCapLog() 
@pytest.fixture(scope="function") def logger(structlog_caplog): import structlog - structlog.configure(processors=[], logger_factory=lambda *args, **kwargs: StructLogCapLog(structlog_caplog)) + + structlog.configure(processors=[], logger_factory=lambda *args, **kwargs: structlog_caplog) _logger = structlog.get_logger() return _logger @@ -92,11 +102,36 @@ def logger(structlog_caplog): @pytest.fixture(scope="function") def filtering_logger(structlog_caplog): import structlog - structlog.configure(processors=[drop_event_processor], logger_factory=lambda *args, **kwargs: StructLogCapLog(structlog_caplog)) + + structlog.configure( + processors=[drop_event_processor], logger_factory=lambda *args, **kwargs: structlog_caplog + ) _filtering_logger = structlog.get_logger() return _filtering_logger +@pytest.fixture(scope="function") +def callsite_parameter_logger(structlog_caplog): + import structlog + + structlog.configure( + processors=[ + structlog.processors.CallsiteParameterAdder( + [ + structlog.processors.CallsiteParameter.FILENAME, + structlog.processors.CallsiteParameter.FUNC_NAME, + structlog.processors.CallsiteParameter.LINENO, + ], + ), + structlog.processors.KeyValueRenderer(), + ], + logger_factory=lambda *args, **kwargs: structlog_caplog, + ) + + _callsite_logger = structlog.get_logger() + return _callsite_logger + + @pytest.fixture def exercise_logging_multiple_lines(set_trace_ids, logger, structlog_caplog): def _exercise(): @@ -106,11 +141,11 @@ def _exercise(): logger.error("Dog") logger.critical("Elephant") - assert len(structlog_caplog) == 3 + assert len(structlog_caplog.caplog) == 3 - assert "Cat" in structlog_caplog[0] - assert "Dog" in structlog_caplog[1] - assert "Elephant" in structlog_caplog[2] + assert "Cat" in structlog_caplog.caplog[0] + assert "Dog" in structlog_caplog.caplog[1] + assert "Elephant" in structlog_caplog.caplog[2] return _exercise @@ -124,11 +159,11 @@ def _exercise(): filtering_logger.error("Dog") 
filtering_logger.critical("Elephant") - assert len(structlog_caplog) == 2 + assert len(structlog_caplog.caplog) == 2 - assert "Cat" not in structlog_caplog[0] - assert "Dog" in structlog_caplog[0] - assert "Elephant" in structlog_caplog[1] + assert "Cat" not in structlog_caplog.caplog[0] + assert "Dog" in structlog_caplog.caplog[0] + assert "Elephant" in structlog_caplog.caplog[1] return _exercise @@ -138,6 +173,6 @@ def exercise_logging_single_line(set_trace_ids, logger, structlog_caplog): def _exercise(): set_trace_ids() logger.error("A", key="value") - assert len(structlog_caplog) == 1 + assert len(structlog_caplog.caplog) == 1 return _exercise diff --git a/tests/logger_structlog/test_local_decorating.py b/tests/logger_structlog/test_local_decorating.py index 7b58d4a0c..78e99d238 100644 --- a/tests/logger_structlog/test_local_decorating.py +++ b/tests/logger_structlog/test_local_decorating.py @@ -39,7 +39,7 @@ def test_local_log_decoration_inside_transaction(exercise_logging_single_line, s @background_task() def test(): exercise_logging_single_line() - assert get_metadata_string('A', True) in structlog_caplog[0] + assert get_metadata_string('A', True) in structlog_caplog.caplog[0] test() @@ -49,6 +49,6 @@ def test_local_log_decoration_outside_transaction(exercise_logging_single_line, @validate_log_event_count_outside_transaction(1) def test(): exercise_logging_single_line() - assert get_metadata_string('A', False) in structlog_caplog[0] + assert get_metadata_string('A', False) in structlog_caplog.caplog[0] test() diff --git a/tests/logger_structlog/test_structlog_processors.py b/tests/logger_structlog/test_structlog_processors.py new file mode 100644 index 000000000..cf6887c58 --- /dev/null +++ b/tests/logger_structlog/test_structlog_processors.py @@ -0,0 +1,25 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from newrelic.api.background_task import background_task +from testing_support.fixtures import reset_core_stats_engine + + +@reset_core_stats_engine() +@background_task() +def test_callsite_parameter_processor(callsite_parameter_logger, structlog_caplog): + callsite_parameter_logger.msg("Dog") + assert "Dog" in structlog_caplog.caplog[0] + assert "filename='test_structlog_processors.py'" in structlog_caplog.caplog[0] + assert "func_name='test_callsite_parameter_processor'" in structlog_caplog.caplog[0] diff --git a/tests/mlmodel_sklearn/test_linear_models.py b/tests/mlmodel_sklearn/test_linear_models.py index 582a4750e..82c339748 100644 --- a/tests/mlmodel_sklearn/test_linear_models.py +++ b/tests/mlmodel_sklearn/test_linear_models.py @@ -22,6 +22,7 @@ from newrelic.packages import six SKLEARN_VERSION = tuple(map(int, get_package_version("sklearn").split("."))) +SCIPY_VERSION = tuple(map(int, get_package_version("scipy").split("."))) @pytest.mark.parametrize( @@ -324,6 +325,10 @@ def _run(linear_model_name): clf = getattr(sklearn.linear_model, linear_model_name)() + if linear_model_name == "QuantileRegressor" and SCIPY_VERSION > (1, 11, 0): + # Silence warnings and errors related to solver change + clf.solver = "highs" + model = clf.fit(x_train, y_train) model.predict(x_test) diff --git a/tests/testing_support/external_fixtures.py b/tests/testing_support/external_fixtures.py index a968fe2d1..de746c38b 100644 --- a/tests/testing_support/external_fixtures.py +++ b/tests/testing_support/external_fixtures.py @@ -51,8 +51,10 @@ def 
create_incoming_headers(transaction): return headers -def validate_synthetics_external_trace_header(required_header=(), - should_exist=True): +def validate_synthetics_external_trace_header( + synthetics_header, + synthetics_info_header, + ): @transient_function_wrapper('newrelic.core.stats_engine', 'StatsEngine.record_transaction') def _validate_synthetics_external_trace_header(wrapped, instance, @@ -67,34 +69,46 @@ def _bind_params(transaction, *args, **kwargs): except: raise else: - if should_exist: - # XXX This validation routine is technically - # broken as the argument to record_transaction() - # is not actually an instance of the Transaction - # object. Instead it is a TransactionNode object. - # The static method generate_request_headers() is - # expecting a Transaction object and not - # TransactionNode. The latter provides attributes - # which are not updatable by the static method - # generate_request_headers(), which it wants to - # update, so would fail. For now what we do is use - # a little proxy wrapper so that updates do not - # fail. The use of this wrapper needs to be - # reviewed and a better way of achieving what is - # required found. - - class _Transaction(object): - def __init__(self, wrapped): - self.__wrapped__ = wrapped - - def __getattr__(self, name): - return getattr(self.__wrapped__, name) - - external_headers = ExternalTrace.generate_request_headers( - _Transaction(transaction)) - assert required_header in external_headers, ( - 'required_header=%r, ''external_headers=%r' % ( - required_header, external_headers)) + # XXX This validation routine is technically + # broken as the argument to record_transaction() + # is not actually an instance of the Transaction + # object. Instead it is a TransactionNode object. + # The static method generate_request_headers() is + # expecting a Transaction object and not + # TransactionNode. 
The latter provides attributes + # which are not updatable by the static method + # generate_request_headers(), which it wants to + # update, so would fail. For now what we do is use + # a little proxy wrapper so that updates do not + # fail. The use of this wrapper needs to be + # reviewed and a better way of achieving what is + # required found. + + class _Transaction(object): + def __init__(self, wrapped): + self.__wrapped__ = wrapped + + def __getattr__(self, name): + return getattr(self.__wrapped__, name, lambda *args, **kwargs: None) + + external_headers = ExternalTrace.generate_request_headers( + _Transaction(transaction)) + external_headers = {header[0]: header[1] for header in external_headers} + + if synthetics_header: + assert synthetics_header == external_headers["X-NewRelic-Synthetics"], ( + 'synthetics_header=%r, external_headers=%r' % ( + synthetics_header, external_headers)) + else: + assert "X-NewRelic-Synthetics" not in external_headers + + if synthetics_info_header: + assert synthetics_info_header == external_headers["X-NewRelic-Synthetics-Info"], ( + 'synthetics_info_header=%r, external_headers=%r' % ( + synthetics_info_header, external_headers)) + else: + assert "X-NewRelic-Synthetics-Info" not in external_headers + return result diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 883c3ec59..fab0150d5 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -363,12 +363,26 @@ def make_cross_agent_headers(payload, encoding_key, cat_id): return {"X-NewRelic-Transaction": value, "X-NewRelic-ID": id_value} +def make_synthetics_headers(encoding_key, account_id, resource_id, job_id, monitor_id, type_, initiator, attributes, synthetics_version=1, synthetics_info_version=1): + headers = {} + headers.update(make_synthetics_header(account_id, resource_id, job_id, monitor_id, encoding_key, synthetics_version)) + if type_: + headers.update(make_synthetics_info_header(type_, initiator, 
attributes, encoding_key, synthetics_info_version)) + return headers + + def make_synthetics_header(account_id, resource_id, job_id, monitor_id, encoding_key, version=1): value = [version, account_id, resource_id, job_id, monitor_id] value = obfuscate(json_encode(value), encoding_key) return {"X-NewRelic-Synthetics": value} +def make_synthetics_info_header(type_, initiator, attributes, encoding_key, version=1): + value = {"version": version, "type": type_, "initiator": initiator, "attributes": attributes} + value = obfuscate(json_encode(value), encoding_key) + return {"X-NewRelic-Synthetics-Info": value} + + def capture_transaction_metrics(metrics_list, full_metrics=None): @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") @catch_background_exceptions @@ -744,6 +758,9 @@ def _bind_params(transaction, *args, **kwargs): return _validate_error_event_sample_data +SYNTHETICS_INTRINSIC_ATTR_NAMES = set(["nr.syntheticsResourceId", "nr.syntheticsJobId", "nr.syntheticsMonitorId", "nr.syntheticsType", "nr.syntheticsInitiator"]) + + def _validate_event_attributes(intrinsics, user_attributes, required_intrinsics, required_user): now = time.time() assert isinstance(intrinsics["timestamp"], int) @@ -793,6 +810,16 @@ def _validate_event_attributes(intrinsics, user_attributes, required_intrinsics, assert intrinsics["nr.syntheticsResourceId"] == res_id assert intrinsics["nr.syntheticsJobId"] == job_id assert intrinsics["nr.syntheticsMonitorId"] == monitor_id + + if "nr.syntheticsType" in required_intrinsics: + type_ = required_intrinsics["nr.syntheticsType"] + initiator = required_intrinsics["nr.syntheticsInitiator"] + assert intrinsics["nr.syntheticsType"] == type_ + assert intrinsics["nr.syntheticsInitiator"] == initiator + + for k, v in required_intrinsics.items(): + if k.startswith("nr.synthetics") and k not in SYNTHETICS_INTRINSIC_ATTR_NAMES: + assert v == intrinsics[k] if "port" in required_intrinsics: assert intrinsics["port"] == 
required_intrinsics["port"] diff --git a/tests/testing_support/validators/validate_synthetics_event.py b/tests/testing_support/validators/validate_synthetics_event.py index 221cf7e6e..bab176138 100644 --- a/tests/testing_support/validators/validate_synthetics_event.py +++ b/tests/testing_support/validators/validate_synthetics_event.py @@ -51,8 +51,8 @@ def _flatten(event): assert name in flat_event, "name=%r, event=%r" % (name, flat_event) assert flat_event[name] == value, "name=%r, value=%r, event=%r" % (name, value, flat_event) - for name, value in forgone_attrs: - assert name not in flat_event, "name=%r, value=%r, event=%r" % (name, value, flat_event) + for name in forgone_attrs: + assert name not in flat_event, "name=%r, event=%r" % (name, flat_event) except Exception as e: failed.append(e) diff --git a/tox.ini b/tox.ini index bb6102d89..5ee9cbdb2 100644 --- a/tox.ini +++ b/tox.ini @@ -45,121 +45,119 @@ setupdir = {toxinidir} ; Fail tests when interpreters are missing. skip_missing_interpreters = false envlist = - python-adapter_cheroot-{py27,py37,py38,py39,py310,py311}, - python-adapter_daphne-{py37,py38,py39,py310,py311}-daphnelatest, - python-adapter_gevent-{py27,py37,py38,py310,py311}, - python-adapter_gunicorn-{py37,py38,py39,py310,py311}-aiohttp3-gunicornlatest, - python-adapter_hypercorn-{py38,py39,py310,py311}-hypercornlatest, + elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}-elasticsearch07, + elasticsearchserver08-datastore_elasticsearch-{py37,py38,py39,py310,py311,py312,pypy38}-elasticsearch08, + firestore-datastore_firestore-{py37,py38,py39,py310,py311,py312}, + grpc-framework_grpc-{py37,py38,py39,py310,py311,py312}-grpclatest, + grpc-framework_grpc-py27-grpc0125, + kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106}, + kafka-messagebroker_confluentkafka-{py37,py38,py39,py310,py311,py312}-confluentkafkalatest, + ; confluent-kafka had a bug in 1.8.2's setup.py file which was 
incompatible with 2.7. + kafka-messagebroker_confluentkafka-{py39}-confluentkafka{0108}, + kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000}, + kafka-messagebroker_kafkapython-{pypy27,py27,py37,py38,pypy38}-kafkapythonlatest, + memcached-datastore_bmemcached-{pypy27,py27,py37,py38,py39,py310,py311,py312}-memcached030, + memcached-datastore_memcache-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}-memcached01, + memcached-datastore_pylibmc-{py27,py37}, + memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,py311,py312,pypy27}-pymongo03, + mongodb-datastore_pymongo-{py37,py38,py39,py310,py311,py312,pypy27,pypy38}-pymongo04, + mssql-datastore_pymssql-{py37,py38,py39,py310,py311,py312}, + mysql-datastore_mysql-mysql080023-py27, + mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310,py311,py312}, + mysql-datastore_pymysql-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + postgres-datastore_asyncpg-{py37,py38,py39,py310,py311,py312}, + postgres-datastore_postgresql-{py37,py38,py39}, + postgres-datastore_psycopg2-{py27,py37,py38,py39,py310,py311,py312}-psycopg2latest, + postgres-datastore_psycopg2cffi-{py27,pypy27,py37,py38,py39,py310,py311,py312}-psycopg2cffilatest, + postgres-datastore_pyodbc-{py27,py37,py38,py39,py310,py311,py312}-pyodbclatest, + python-adapter_cheroot-{py27,py37,py38,py39,py310,py311,py312}, + python-adapter_daphne-{py37,py38,py39,py310,py311,py312}-daphnelatest, + python-adapter_gevent-{py27,py37,py38,py310,py311,py312}, + python-adapter_gunicorn-{py37,py38,py39,py310,py311,py312}-aiohttp03-gunicornlatest, + python-adapter_hypercorn-{py38,py39,py310,py311,py312}-hypercornlatest, python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, - python-adapter_uvicorn-{py37,py38,py39,py310,py311}-uvicorn{014,latest}, + python-adapter_uvicorn-{py37,py38,py39,py310,py311,py312}-uvicornlatest, + python-adapter_uvicorn-py38-uvicorn014, 
+ python-adapter_waitress-{py37,py38,py39,py310,py311,py312}-waitresslatest, python-adapter_waitress-{py37,py38,py39,py310}-waitress02, - python-adapter_waitress-{py37,py38,py39,py310,py311}-waitresslatest, - python-agent_features-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, + python-adapter_waitress-{py37,py38,py39}-waitress010404, + python-agent_features-{py27,py37,py38,py39,py310,py311,py312}-{with,without}_extensions, python-agent_features-{pypy27,pypy38}-without_extensions, + python-agent_streaming-{py37,py38,py39,py310,py311,py312}-protobuf04-{with,without}_extensions, python-agent_streaming-py27-grpc0125-{with,without}_extensions, - python-agent_streaming-{py37,py38,py39,py310,py311}-protobuf04-{with,without}_extensions, python-agent_streaming-py39-protobuf{03,0319}-{with,without}_extensions, - python-agent_unittests-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, + python-agent_unittests-{py27,py37,py38,py39,py310,py311,py312}-{with,without}_extensions, python-agent_unittests-{pypy27,pypy38}-without_extensions, - python-application_celery-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - gearman-application_gearman-{py27,pypy27}, - python-mlmodel_sklearn-{py38,py39,py310,py311}-scikitlearnlatest, - python-mlmodel_sklearn-{py37}-scikitlearn0101, - python-component_djangorestframework-py27-djangorestframework0300, - python-component_djangorestframework-{py37,py38,py39,py310,py311}-djangorestframeworklatest, - python-component_flask_rest-{py37,py38,py39,pypy38}-flaskrestxlatest, + python-application_celery-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + python-component_djangorestframework-{py37,py38,py39,py310,py311,py312}-djangorestframeworklatest, python-component_flask_rest-{py27,pypy27}-flaskrestx051, - python-component_graphqlserver-{py37,py38,py39,py310,py311}, - python-component_tastypie-{py27,pypy27}-tastypie0143, - python-component_tastypie-{py37,py38,py39,pypy38}-tastypie{0143,latest}, - 
python-coroutines_asyncio-{py37,py38,py39,py310,py311,pypy38}, - python-cross_agent-{py27,py37,py38,py39,py310,py311}-{with,without}_extensions, + python-component_flask_rest-{py37,py38,py39,pypy38}-flaskrestxlatest, + python-component_graphqlserver-{py37,py38,py39,py310,py311,py312}, + python-component_tastypie-{py37,py38,py39,py310,py311,py312,pypy38}-tastypielatest, + python-coroutines_asyncio-{py37,py38,py39,py310,py311,py312,pypy38}, + python-cross_agent-{py27,py37,py38,py39,py310,py311,py312}-{with,without}_extensions, python-cross_agent-pypy27-without_extensions, - postgres-datastore_asyncpg-{py37,py38,py39,py310,py311}, - memcached-datastore_bmemcached-{pypy27,py27,py37,py38,py39,py310,py311}-memcached030, - elasticsearchserver07-datastore_elasticsearch-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}-elasticsearch07, - elasticsearchserver08-datastore_elasticsearch-{py37,py38,py39,py310,py311,pypy38}-elasticsearch08, - memcached-datastore_memcache-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}-memcached01, - mysql-datastore_mysql-mysql080023-py27, - mysql-datastore_mysql-mysqllatest-{py37,py38,py39,py310,py311}, - firestore-datastore_firestore-{py37,py38,py39,py310,py311}, - postgres-datastore_postgresql-{py37,py38,py39}, - postgres-datastore_psycopg2-{py27,py37,py38,py39,py310,py311}-psycopg2latest - postgres-datastore_psycopg2cffi-{py27,pypy27,py37,py38,py39,py310,py311}-psycopg2cffilatest, - postgres-datastore_pyodbc-{py27,py37,py311}-pyodbclatest - memcached-datastore_pylibmc-{py27,py37}, - memcached-datastore_pymemcache-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - mongodb-datastore_pymongo-{py27,py37,py38,py39,py310,py311,pypy27}-pymongo{03}, - mongodb-datastore_pymongo-{py37,py38,py39,py310,py311,pypy27,pypy38}-pymongo04, - mssql-datastore_pymssql-{py37,py38,py39,py310,py311}, - mysql-datastore_pymysql-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - solr-datastore_pysolr-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - 
redis-datastore_redis-{py37,py38,py39,py310,py311,pypy38}-redis{0400,latest}, - rediscluster-datastore_rediscluster-{py37,py311,pypy38}-redis{latest}, - python-datastore_sqlite-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - python-external_botocore-{py37,py38,py39,py310,py311}-botocorelatest, - python-external_botocore-{py311}-botocore128, + python-datastore_sqlite-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + python-external_botocore-py311-botocore128, + python-external_botocore-{py37,py38,py39,py310,py311,py312}-botocorelatest, python-external_botocore-py310-botocore0125, python-external_feedparser-py27-feedparser{05,06}, - python-external_http-{py27,py37,py38,py39,py310,py311,pypy27}, - python-external_httplib-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - python-external_httplib2-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - python-external_httpx-{py37,py38,py39,py310,py311}, - python-external_requests-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, + python-external_http-{py27,py37,py38,py39,py310,py311,py312,pypy27}, + python-external_httplib-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + python-external_httplib2-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + python-external_httpx-{py37,py38,py39,py310,py311,py312}, + python-external_requests-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + python-external_urllib3-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}-urllib3latest, python-external_urllib3-{py27,py37,pypy27}-urllib3{0109}, - python-external_urllib3-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}-urllib3latest, python-framework_aiohttp-{py37,py38,py39,py310,py311,pypy38}-aiohttp03, - python-framework_ariadne-{py37,py38,py39,py310,py311}-ariadnelatest, + python-framework_aiohttp-py312-aiohttp030900rc0, + python-framework_ariadne-{py37,py38,py39,py310,py311,py312}-ariadnelatest, python-framework_ariadne-py37-ariadne{0011,0012,0013}, - 
python-framework_bottle-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}-bottle0012, - ; CherryPy still uses inspect.getargspec, deprecated in favor of inspect.getfullargspec. Not supported in 3.11 - python-framework_cherrypy-{py37,py38,py39,py310,py311,pypy38}-CherryPylatest, - python-framework_django-{pypy27,py27}-Django0103, - python-framework_django-{pypy27,py27,py37}-Django0108, - python-framework_django-{py39}-Django{0200,0201,0202,0300,0301,latest}, - python-framework_django-{py37,py38,py39,py310,py311}-Django0302, + python-framework_bottle-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}-bottle0012, + python-framework_cherrypy-{py37,py38,py39,py310,py311,py312,pypy38}-CherryPylatest, + python-framework_django-{py37,py38,py39,py310,py311,py312}-Djangolatest, + python-framework_django-{py39}-Django{0202,0300,0301,0302,0401}, python-framework_falcon-{py27,py37,py38,py39,pypy27,pypy38}-falcon0103, + ; Falcon master branch failing on 3.11 and 3.12 currently. + python-framework_falcon-{py311,py312}-falcon0200, python-framework_falcon-{py37,py38,py39,py310,pypy38}-falcon{0200,master}, - # Falcon master branch failing on 3.11 currently. 
- python-framework_falcon-py311-falcon0200, - python-framework_fastapi-{py37,py38,py39,py310,py311}, + python-framework_fastapi-{py37,py38,py39,py310,py311,py312}, ; temporarily disabling flaskmaster tests - python-framework_flask-{py37,py38,py39,py310,py311,pypy38}-flasklatest, - python-framework_graphene-{py37,py38,py39,py310,py311}-graphenelatest, - python-framework_graphql-{py37,py38,py39,py310,py311,pypy38}-graphqllatest, + python-framework_flask-{py37,py38,py39,py310,py311,py312,pypy38}-flasklatest, + python-framework_graphene-{py37,py38,py39,py310,py311,py312}-graphenelatest, + python-framework_graphql-{py37,py38,py39,py310,py311,py312,pypy38}-graphql03, ; temporarily disabling graphqlmaster tests + python-framework_graphql-{py37,py38,py39,py310,py311,py312,pypy38}-graphqllatest, python-framework_graphql-py37-graphql{0300,0301,0302}, - grpc-framework_grpc-py27-grpc0125, - grpc-framework_grpc-{py37,py38,py39,py310,py311}-grpclatest, - python-framework_pyramid-{pypy27,py27,pypy38,py37,py38,py39,py310,py311}-Pyramid0110-cornice, - python-framework_pyramid-{py37,py38,py39,py310,py311,pypy38}-Pyramidlatest, + python-framework_pyramid-{py37,py38,py39,py310,py311,py312,pypy38}-Pyramidlatest, + python-framework_pyramid-{pypy27,py27,pypy38,py37,py38,py39,py310,py311,py312}-Pyramid0110-cornice, + python-framework_sanic-{py37,py38,py39,py310,py311,py312,pypy38}-saniclatest, python-framework_sanic-{py38,pypy38}-sanic{200904,210300,2109,2112,2203,2290}, - python-framework_sanic-{py37,py38,py39,py310,py311,pypy38}-saniclatest, python-framework_starlette-{py310,pypy38}-starlette{0014,0015,0019,0028}, + python-framework_starlette-{py37,py38,py39,py310,py311,py312,pypy38}-starlettelatest, python-framework_starlette-{py37,py38}-starlette{002001}, - python-framework_starlette-{py37,py38,py39,py310,py311,pypy38}-starlettelatest, - python-framework_strawberry-{py37,py38,py39,py310,py311}-strawberrylatest, - python-logger_logging-{py27,py37,py38,py39,py310,py311,pypy27,pypy38}, - 
python-logger_loguru-{py37,py38,py39,py310,py311,pypy38}-logurulatest, + python-framework_strawberry-{py37,py38,py39,py310,py311,py312}-strawberrylatest, + python-framework_tornado-{py38,py39,py310,py311,py312}-tornadolatest, + python-framework_tornado-{py38,py39,py310,py311,py312}-tornadomaster, + python-logger_logging-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, + python-logger_loguru-{py37,py38,py39,py310,py311,py312,pypy38}-logurulatest, python-logger_loguru-py39-loguru{06,05}, - python-logger_structlog-{py37,py38,py39,py310,py311,pypy38}-structloglatest, - python-framework_tornado-{py38,py39,py310,py311}-tornadolatest, - python-framework_tornado-{py38,py39,py310,py311}-tornadomaster, - rabbitmq-messagebroker_pika-{py37,py38,py39,py310,py311,pypy38}-pikalatest, - kafka-messagebroker_confluentkafka-{py37,py38,py39,py310,py311}-confluentkafkalatest, - kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106}, - ; confluent-kafka had a bug in 1.8.2's setup.py file which was incompatible with 2.7. 
- kafka-messagebroker_confluentkafka-{py39}-confluentkafka{0108}, - kafka-messagebroker_kafkapython-{pypy27,py27,py37,py38,pypy38}-kafkapythonlatest, - kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000}, - python-template_genshi-{py27,py37,py311}-genshilatest - python-template_jinja2-{py37,py311}-jinja2latest - python-template_mako-{py27,py37,py310,py311} + python-logger_structlog-{py37,py38,py39,py310,py311,py312,pypy38}-structloglatest, + python-mlmodel_sklearn-{py37}-scikitlearn0101, + python-mlmodel_sklearn-{py38,py39,py310,py311,py312}-scikitlearnlatest, + python-template_genshi-{py27,py37,py38,py39,py310,py311,py312}-genshilatest, + python-template_jinja2-{py37,py38,py39,py310,py311,py312}-jinja2latest, + python-template_mako-{py27,py37,py38,py39,py310,py311,py312}, + rabbitmq-messagebroker_pika-{py37,py38,py39,py310,py311,py312,pypy38}-pikalatest, + redis-datastore_redis-{py37,py38,py39,py310,py311,py312,pypy38}-redis{0400,latest}, + rediscluster-datastore_rediscluster-{py37,py311,py312,pypy38}-redislatest, + solr-datastore_pysolr-{py27,py37,py38,py39,py310,py311,py312,pypy27,pypy38}, [testenv] deps = # Base Dependencies - {py37,py38,py39,py310,py311,pypy38}: pytest==7.2.2 + {py37,py38,py39,py310,py311,py312,pypy38}: pytest==7.2.2 {py27,pypy27}: pytest==4.6.11 iniconfig coverage @@ -171,8 +169,8 @@ deps = adapter_gevent: WSGIProxy2 adapter_gevent: gevent adapter_gevent: urllib3 - adapter_gunicorn-aiohttp1: aiohttp<2.0 - adapter_gunicorn-aiohttp3: aiohttp<4.0 + adapter_gunicorn-aiohttp03: aiohttp<4.0 + adapter_gunicorn-aiohttp03-py312: aiohttp==3.9.0rc0 adapter_gunicorn-gunicorn19: gunicorn<20 adapter_gunicorn-gunicornlatest: gunicorn adapter_hypercorn-hypercornlatest: hypercorn @@ -187,19 +185,17 @@ deps = adapter_waitress-waitress02: waitress<2.1 adapter_waitress-waitresslatest: waitress agent_features: beautifulsoup4 - agent_features-{py37,py38,py39,py310,py311,pypy38}: protobuf + 
agent_features-{py37,py38,py39,py310,py311,py312,pypy38}: protobuf agent_features-{py27,pypy27}: protobuf<3.18.0 application_celery: celery<6.0 application_celery-{py37,pypy38}: importlib-metadata<5.0 - application_gearman: gearman<3.0.0 mlmodel_sklearn: pandas mlmodel_sklearn: protobuf mlmodel_sklearn: numpy - mlmodel_sklearn: scipy<1.11.0 mlmodel_sklearn-scikitlearnlatest: scikit-learn + mlmodel_sklearn-scikitlearnlatest: scipy mlmodel_sklearn-scikitlearn0101: scikit-learn<1.1 - component_djangorestframework-djangorestframework0300: Django<1.9 - component_djangorestframework-djangorestframework0300: djangorestframework<3.1 + mlmodel_sklearn-scikitlearn0101: scipy<1.11.0 component_djangorestframework-djangorestframeworklatest: Django component_djangorestframework-djangorestframeworklatest: djangorestframework component_flask_rest: flask @@ -215,14 +211,12 @@ deps = component_graphqlserver: Flask component_graphqlserver: markupsafe<2.1 component_graphqlserver: jinja2<3.1 - component_tastypie-tastypie0143: django-tastypie<0.14.4 - component_tastypie-{py27,pypy27}-tastypie0143: django<1.12 - component_tastypie-{py37,py38,py39,py310,py311,pypy38}-tastypie0143: django<3.0.1 - component_tastypie-{py37,py38,py39,py310,py311,pypy38}-tastypie0143: asgiref<3.7.1 # asgiref==3.7.1 only suppport Python 3.10+ + component_tastypie-{py37,py38,py39,py310,py311,py312,pypy38}-tastypie0143: django<3.0.1 + component_tastypie-{py37,py38,py39,py310,py311,py312,pypy38}-tastypie0143: asgiref<3.7.1 # asgiref==3.7.1 only supports Python 3.10+ component_tastypie-tastypielatest: django-tastypie component_tastypie-tastypielatest: django<4.1 component_tastypie-tastypielatest: asgiref<3.7.1 # asgiref==3.7.1 only suppport Python 3.10+ - coroutines_asyncio-{py37,py38,py39,py310,py311}: uvloop + coroutines_asyncio-{py37,py38,py39,py310,py311,py312}: uvloop cross_agent: mock==1.0.1 cross_agent: requests datastore_asyncpg: asyncpg @@ -254,7 +248,7 @@ deps = external_botocore-botocorelatest: boto3
external_botocore-botocore128: botocore<1.29 external_botocore-botocore0125: botocore<1.26 - external_botocore-{py37,py38,py39,py310,py311}: moto[awslambda,ec2,iam,sqs] + external_botocore-{py37,py38,py39,py310,py311,py312}: moto[awslambda,ec2,iam,sqs] external_botocore-py27: rsa<4.7.1 external_botocore-py27: moto[awslambda,ec2,iam,sqs]<2.0 external_feedparser-feedparser05: feedparser<6 @@ -265,7 +259,8 @@ deps = external_requests: requests external_urllib3-urllib30109: urllib3<1.10 external_urllib3-urllib3latest: urllib3 - framework_aiohttp-aiohttp03: aiohttp + framework_aiohttp-aiohttp03: aiohttp<4 + framework_aiohttp-aiohttp030900rc0: aiohttp==3.9.0rc0 framework_ariadne-ariadnelatest: ariadne framework_ariadne-ariadne0011: ariadne<0.12 framework_ariadne-ariadne0012: ariadne<0.13 @@ -274,15 +269,12 @@ deps = framework_bottle: jinja2<3.1 framework_bottle: markupsafe<2.1 framework_cherrypy: routes - framework_cherrypy-CherryPylatest: CherryPy - framework_django-Django0103: Django<1.4 - framework_django-Django0108: Django<1.9 - framework_django-Django0200: Django<2.1 - framework_django-Django0201: Django<2.2 + framework_cherrypy: CherryPy framework_django-Django0202: Django<2.3 framework_django-Django0300: Django<3.1 framework_django-Django0301: Django<3.2 framework_django-Django0302: Django<3.3 + framework_django-Django0401: Django<4.2 framework_django-Djangolatest: Django framework_django-Djangomaster: https://github.com/django/django/archive/main.zip framework_falcon-falcon0103: falcon<1.4 @@ -297,7 +289,8 @@ deps = framework_flask-flaskmaster: https://github.com/pallets/werkzeug/archive/main.zip framework_flask-flaskmaster: https://github.com/pallets/flask/archive/main.zip#egg=flask[async] framework_graphene-graphenelatest: graphene - framework_graphql-graphqllatest: graphql-core<4 + framework_graphql-graphqllatest: graphql-core + framework_graphql-graphql03: graphql-core<4 framework_graphql-graphql0300: graphql-core<3.1 framework_graphql-graphql0301: 
graphql-core<3.2 framework_graphql-graphql0302: graphql-core<3.3 @@ -313,7 +306,6 @@ deps = protobuf04: protobuf<5 framework_pyramid: routes framework_pyramid-cornice: cornice!=5.0.0 - framework_pyramid-Pyramid0110: Pyramid<1.11 framework_pyramid-Pyramidlatest: Pyramid framework_sanic-sanic200904: sanic<20.9.5 framework_sanic-sanic210300: sanic<21.3.1 @@ -367,9 +359,9 @@ setenv = without_extensions: NEW_RELIC_EXTENSIONS = false agent_features: NEW_RELIC_APDEX_T = 1000 framework_grpc: PYTHONPATH={toxinidir}/tests/:{toxinidir}/tests/framework_grpc/sample_application - framework_tornado-{py38,py39,py310,py311}: PYCURL_SSL_LIBRARY=openssl - framework_tornado-{py38,py39,py310,py311}: LDFLAGS=-L/usr/local/opt/openssl/lib - framework_tornado-{py38,py39,py310,py311}: CPPFLAGS=-I/usr/local/opt/openssl/include + framework_tornado-{py38,py39,py310,py311,py312}: PYCURL_SSL_LIBRARY=openssl + framework_tornado-{py38,py39,py310,py311,py312}: LDFLAGS=-L/usr/local/opt/openssl/lib + framework_tornado-{py38,py39,py310,py311,py312}: CPPFLAGS=-I/usr/local/opt/openssl/include passenv = NEW_RELIC_DEVELOPER_MODE @@ -384,7 +376,7 @@ commands = framework_grpc: --grpc_python_out={toxinidir}/tests/framework_grpc/sample_application \ framework_grpc: /{toxinidir}/tests/framework_grpc/sample_application/sample_application.proto - framework_tornado-{py38,py39,py310,py311}: pip install --ignore-installed --config-settings="--build-option=--with-openssl" pycurl + framework_tornado-{py38,py39,py310,py311,py312}: pip install --ignore-installed --config-settings="--build-option=--with-openssl" pycurl coverage run -m pytest -v [] allowlist_externals={toxinidir}/.github/scripts/* @@ -407,7 +399,6 @@ changedir = agent_streaming: tests/agent_streaming agent_unittests: tests/agent_unittests application_celery: tests/application_celery - application_gearman: tests/application_gearman mlmodel_sklearn: tests/mlmodel_sklearn component_djangorestframework: tests/component_djangorestframework 
component_flask_rest: tests/component_flask_rest