Move DD code into its own directory (#6)

This commit is contained in:
Diego Hurtado
2020-04-08 11:39:44 -06:00
committed by GitHub
parent 72b40ba5f9
commit 5aee3ce32e
611 changed files with 0 additions and 0 deletions

View File

@ -0,0 +1,27 @@
from ..vendor import debtcollector
# https://stackoverflow.com/a/26853961
def merge_dicts(x, y):
"""Returns a copy of y merged into x."""
z = x.copy() # start with x's keys and values
z.update(y) # modifies z with y's keys and values & returns None
return z
def get_module_name(module):
"""Returns a module's name or None if one cannot be found.
Relevant PEP: https://www.python.org/dev/peps/pep-0451/
"""
if hasattr(module, "__spec__"):
return module.__spec__.name
return getattr(module, "__name__", None)
# Based on: https://stackoverflow.com/a/7864317
class removed_classproperty(property):
def __get__(self, cls, owner):
debtcollector.deprecate(
"Usage of ddtrace.ext.AppTypes is not longer supported, please use ddtrace.ext.SpanTypes"
)
return classmethod(self.fget).__get__(None, owner)()
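Not part of the diff — a minimal usage sketch of the two helpers above, assuming they are imported from this module:
# Illustrative only: merge_dicts returns a copy, leaving `x` untouched.
defaults = {"service": "web", "env": "dev"}
overrides = {"env": "prod"}
merged = merge_dicts(defaults, overrides)
assert merged == {"service": "web", "env": "prod"}
assert defaults == {"service": "web", "env": "dev"}

# get_module_name prefers __spec__.name (PEP 451) and falls back to __name__.
import sys
assert get_module_name(sys) == "sys"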

View File

@ -0,0 +1,37 @@
class AttrDict(dict):
"""
dict implementation that allows for item attribute access
Example::
data = AttrDict()
data['key'] = 'value'
print(data['key'])
data.key = 'new-value'
print(data.key)
# Convert an existing `dict`
data = AttrDict(dict(key='value'))
print(data.key)
"""
def __getattr__(self, key):
if key in self:
return self[key]
return object.__getattribute__(self, key)
def __setattr__(self, key, value):
# 1) Ensure if the key exists from a dict key we always prefer that
# 2) If we do not have an existing key but we do have an attr, set that
# 3) No existing key or attr exists, so set a key
if key in self:
# Update any existing key
self[key] = value
elif hasattr(self, key):
# Allow overwriting an existing attribute, e.g. `self.global_config = dict()`
object.__setattr__(self, key, value)
else:
# Set a new key
self[key] = value
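Not part of the diff — a short sketch of the attribute-write precedence described in the comments above:
# Illustrative only: writes prefer an existing dict key, then an existing
# attribute, and only then create a new key.
data = AttrDict(key="value")
data.key = "new-value"     # existing dict key is updated
data.other = "something"   # no such key or attribute, so a new key is created
assert data == {"key": "new-value", "other": "something"}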

View File

@ -0,0 +1,11 @@
import sys
import os
def get_application_name():
"""Attempts to find the application name using system arguments."""
if hasattr(sys, "argv") and sys.argv[0]:
app_name = os.path.basename(sys.argv[0])
else:
app_name = None
return app_name

View File

@ -0,0 +1,61 @@
import warnings
from functools import wraps
class RemovedInDDTrace10Warning(DeprecationWarning):
pass
def format_message(name, message, version):
"""Message formatter to create `DeprecationWarning` messages
such as:
'fn' is deprecated and will be removed in future versions (1.0).
"""
return "'{}' is deprecated and will be removed in future versions{}. {}".format(
name, " ({})".format(version) if version else "", message,
)
def warn(message, stacklevel=2):
"""Helper function used as a ``DeprecationWarning``."""
warnings.warn(message, RemovedInDDTrace10Warning, stacklevel=stacklevel)
def deprecation(name="", message="", version=None):
"""Function to report a ``DeprecationWarning``. Bear in mind that `DeprecationWarning`
are ignored by default so they're not available in user logs. To show them,
the application must be launched with a special flag:
$ python -Wall script.py
This approach is used by most of the frameworks, including Django
(ref: https://docs.djangoproject.com/en/2.0/howto/upgrade-version/#resolving-deprecation-warnings)
"""
msg = format_message(name, message, version)
warn(msg, stacklevel=4)
def deprecated(message="", version=None):
"""Decorator function to report a ``DeprecationWarning``. Bear
in mind that `DeprecationWarning` are ignored by default so they're
not available in user logs. To show them, the application must be launched
with a special flag:
$ python -Wall script.py
This approach is used by most of the frameworks, including Django
(ref: https://docs.djangoproject.com/en/2.0/howto/upgrade-version/#resolving-deprecation-warnings)
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
msg = format_message(func.__name__, message, version)
warn(msg, stacklevel=3)
return func(*args, **kwargs)
return wrapper
return decorator
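Not part of the diff — a sketch of the decorator in use; `old_helper` and its message are made up for the example:
# Illustrative only: warnings of this kind are hidden by default, so run with
# `python -Wall script.py` (or `warnings.simplefilter("always")`) to see them.
@deprecated(message="Use `new_helper` instead.", version="1.0.0")
def old_helper():
    return 42

old_helper()
# RemovedInDDTrace10Warning: 'old_helper' is deprecated and will be removed
# in future versions (1.0.0). Use `new_helper` instead.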

View File

@ -0,0 +1,82 @@
import os
from .deprecation import deprecation
def get_env(integration, variable, default=None):
"""Retrieves environment variables value for the given integration. It must be used
for consistency between integrations. The implementation is backward compatible
with legacy nomenclature:
* `DATADOG_` is a legacy prefix with lower priority
* `DD_` environment variables have the highest priority
* the environment variable is built concatenating `integration` and `variable`
arguments
* return `default` otherwise
"""
key = "{}_{}".format(integration, variable).upper()
legacy_env = "DATADOG_{}".format(key)
env = "DD_{}".format(key)
value = os.getenv(env)
legacy = os.getenv(legacy_env)
if legacy:
# Deprecation: `DATADOG_` variables are deprecated
deprecation(
name="DATADOG_", message="Use `DD_` prefix instead", version="1.0.0",
)
value = value or legacy
return value if value else default
def deep_getattr(obj, attr_string, default=None):
"""
Returns the attribute of `obj` at the dotted path given by `attr_string`
If no such attribute is reachable, returns `default`
>>> deep_getattr(cass, 'cluster')
<cassandra.cluster.Cluster object at 0xa20c350>
>>> deep_getattr(cass, 'cluster.metadata.partitioner')
u'org.apache.cassandra.dht.Murmur3Partitioner'
>>> deep_getattr(cass, 'i.dont.exist', default='default')
'default'
"""
attrs = attr_string.split(".")
for attr in attrs:
try:
obj = getattr(obj, attr)
except AttributeError:
return default
return obj
def asbool(value):
"""Convert the given String to a boolean object.
Accepted values are `True` and `1`.
"""
if value is None:
return False
if isinstance(value, bool):
return value
return value.lower() in ("true", "1")
def flatten_dict(d, sep=".", prefix=""):
"""
Returns a flattened dict of depth 1 whose keys are the `sep`-joined paths of the nested keys
"""
# adapted from https://stackoverflow.com/a/19647596
return (
{prefix + sep + k if prefix else k: v for kk, vv in d.items() for k, v in flatten_dict(vv, sep, kk).items()}
if isinstance(d, dict)
else {prefix: d}
)
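Not part of the diff — a sketch of these helpers in use; the `requests` integration name and the config values are made up for the example:
# Illustrative only: DD_REQUESTS_SERVICE wins over the deprecated
# DATADOG_REQUESTS_SERVICE, and the default is used when neither is set.
service = get_env("requests", "service", default="requests")

assert asbool("True") and asbool("1") and not asbool(None)

flat = flatten_dict({"http": {"timeout": 5, "retries": {"max": 3}}})
assert flat == {"http.timeout": 5, "http.retries.max": 3}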

View File

@ -0,0 +1,197 @@
"""
This module is based off of wrapt.importer (wrapt==1.11.0)
https://github.com/GrahamDumpleton/wrapt/blob/4bcd190457c89e993ffcfec6dad9e9969c033e9e/src/wrapt/importer.py#L127-L136
The reasoning for this is that wrapt.importer does not provide a mechanism to
remove the import hooks and that wrapt removes the hooks after they are fired.
So this module differs from wrapt.importer in that:
- removes unnecessary functionality (like allowing hooks to be import paths)
- deregister_post_import_hook is introduced to remove hooks
- the values of _post_import_hooks can only be lists (instead of allowing None)
- notify_module_loaded is modified to not remove the hooks when they are
fired.
"""
import sys
import threading
from ..compat import PY3
from ..internal.logger import get_logger
from ..utils import get_module_name
from ..vendor.wrapt.decorators import synchronized
log = get_logger(__name__)
_post_import_hooks = {}
_post_import_hooks_init = False
_post_import_hooks_lock = threading.RLock()
@synchronized(_post_import_hooks_lock)
def register_post_import_hook(name, hook):
"""
Registers a module import hook, ``hook`` for a module with name ``name``.
If the module is already imported, the hook is called immediately and a
debug message is logged, since this should not be expected in our use-case.
:param name: Name of the module (full dotted path)
:type name: str
:param hook: Callable to be invoked with the module when it is imported.
:type hook: Callable
:return:
"""
# Automatically install the import hook finder if it has not already
# been installed.
global _post_import_hooks_init
if not _post_import_hooks_init:
_post_import_hooks_init = True
sys.meta_path.insert(0, ImportHookFinder())
hooks = _post_import_hooks.get(name, [])
if hook in hooks:
log.debug('hook "%s" already exists on module "%s"', hook, name)
return
module = sys.modules.get(name, None)
# If the module has already been imported, fire the hook and log a debug message.
if module:
log.debug('module "%s" already imported, firing hook', name)
hook(module)
hooks.append(hook)
_post_import_hooks[name] = hooks
@synchronized(_post_import_hooks_lock)
def notify_module_loaded(module):
"""
Indicate that a module has been loaded. Any post import hooks which were
registered for the target module will be invoked.
Any raised exceptions will be caught and an error message will be logged
indicating that the hook failed.
:param module: The module being loaded
:type module: ``types.ModuleType``
"""
name = get_module_name(module)
hooks = _post_import_hooks.get(name, [])
for hook in hooks:
try:
hook(module)
except Exception:
log.warning('hook "%s" for module "%s" failed', hook, name, exc_info=True)
class _ImportHookLoader(object):
"""
A custom module import finder. This intercepts attempts to import
modules and watches out for attempts to import target modules of
interest. When a module of interest is imported, then any post import
hooks which are registered will be invoked.
"""
def load_module(self, fullname):
module = sys.modules[fullname]
notify_module_loaded(module)
return module
class _ImportHookChainedLoader(object):
def __init__(self, loader):
self.loader = loader
def load_module(self, fullname):
module = self.loader.load_module(fullname)
notify_module_loaded(module)
return module
class ImportHookFinder:
def __init__(self):
self.in_progress = {}
@synchronized(_post_import_hooks_lock)
def find_module(self, fullname, path=None):
# If the module being imported is not one we have registered
# post import hooks for, we can return immediately. We will
# take no further part in the importing of this module.
if fullname not in _post_import_hooks:
return None
# When we are interested in a specific module, we will call back
# into the import system a second time to defer to the import
# finder that is supposed to handle the importing of the module.
# We set an in progress flag for the target module so that on
# the second time through we don't trigger another call back
# into the import system and cause an infinite loop.
if fullname in self.in_progress:
return None
self.in_progress[fullname] = True
# Now call back into the import system again.
try:
if PY3:
# For Python 3 we need to use find_spec().loader
# from the importlib.util module. It doesn't actually
# import the target module and only finds the
# loader. If a loader is found, we need to return
# our own loader which will then in turn call the
# real loader to import the module and invoke the
# post import hooks.
try:
import importlib.util
loader = importlib.util.find_spec(fullname).loader
except (ImportError, AttributeError):
loader = importlib.find_loader(fullname, path)
if loader:
return _ImportHookChainedLoader(loader)
else:
# For Python 2 we don't have much choice but to
# call back in to __import__(). This will
# actually cause the module to be imported. If no
# module could be found then ImportError will be
# raised. Otherwise we return a loader which
# returns the already loaded module and invokes
# the post import hooks.
__import__(fullname)
return _ImportHookLoader()
finally:
del self.in_progress[fullname]
@synchronized(_post_import_hooks_lock)
def deregister_post_import_hook(modulename, hook):
"""
Deregisters post import hooks for a module given the module name and a hook
that was previously installed.
:param modulename: Name of the module the hook is installed on.
:type modulename: str
:param hook: The hook to remove (the function itself)
:type hook: Callable
:return: whether a hook was removed or not
"""
if modulename not in _post_import_hooks:
return False
hooks = _post_import_hooks[modulename]
try:
hooks.remove(hook)
return True
except ValueError:
return False
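Not part of the diff — a sketch of registering and later removing a post-import hook; the target module name is arbitrary here:
# Illustrative only: the hook fires when `requests` is imported, or immediately
# if it is already in sys.modules; deregistering prevents further invocations.
def _on_requests_import(module):
    log.debug("requests imported: %s", module.__name__)

register_post_import_hook("requests", _on_requests_import)
# ... later, e.g. when unpatching the integration:
deregister_post_import_hook("requests", _on_requests_import)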

View File

@ -0,0 +1,9 @@
def normalize_header_name(header_name):
"""
Normalizes a header name to lower case, stripping its leading and trailing white space.
:param header_name: the header name to normalize
:type header_name: str
:return: the normalized header name
:rtype: str
"""
return header_name.strip().lower() if header_name is not None else None

View File

@ -0,0 +1,33 @@
from __future__ import absolute_import
from importlib import import_module
class require_modules(object):
"""Context manager to check the availability of required modules."""
def __init__(self, modules):
self._missing_modules = []
for module in modules:
try:
import_module(module)
except ImportError:
self._missing_modules.append(module)
def __enter__(self):
return self._missing_modules
def __exit__(self, exc_type, exc_value, traceback):
return False
def func_name(f):
"""Return a human readable version of the function's name."""
if hasattr(f, "__module__"):
return "%s.%s" % (f.__module__, getattr(f, "__name__", f.__class__.__name__))
return getattr(f, "__name__", f.__class__.__name__)
def module_name(instance):
"""Return the instance module name."""
return instance.__class__.__module__.split(".")[0]
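Not part of the diff — the integration-style pattern this context manager is written for, with `flask` as an arbitrary example dependency:
# Illustrative only: `missing_modules` is empty when every listed module imports.
required_modules = ["flask"]

with require_modules(required_modules) as missing_modules:
    if not missing_modules:
        # Safe to import code that depends on flask here.
        pass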

View File

@ -0,0 +1,19 @@
# Borrowed from: https://stackoverflow.com/questions/20656135/python-deep-merge-dictionary-data#20666342
def deepmerge(source, destination):
"""
Merge the first provided ``dict`` into the second.
:param dict source: The ``dict`` to merge into ``destination``
:param dict destination: The ``dict`` that should get updated
:rtype: dict
:returns: ``destination`` modified
"""
for key, value in source.items():
if isinstance(value, dict):
# get node or create one
node = destination.setdefault(key, {})
deepmerge(value, node)
else:
destination[key] = value
return destination
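Not part of the diff — a quick sketch showing that `destination` is mutated and returned:
# Illustrative only: nested keys from `source` are merged into `destination`.
destination = {"http": {"timeout": 5}, "env": "dev"}
source = {"http": {"retries": 3}, "env": "prod"}
result = deepmerge(source, destination)
assert result is destination
assert destination == {"http": {"timeout": 5, "retries": 3}, "env": "prod"}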

View File

@ -0,0 +1,57 @@
from ..vendor import monotonic
class StopWatch(object):
"""A simple timer/stopwatch helper class.
Not thread-safe when a single watch is mutated by multiple threads at
the same time. Safe when used by a single thread (not shared), or when
operations on shared watches are wrapped with locks so they are performed
in a thread-safe manner.
It will use the `monotonic`_ pypi library to find an appropriate
monotonically increasing time providing function (which typically varies
depending on operating system and Python version).
.. _monotonic: https://pypi.python.org/pypi/monotonic/
"""
def __init__(self):
self._started_at = None
self._stopped_at = None
def start(self):
"""Starts the watch."""
self._started_at = monotonic.monotonic()
return self
def elapsed(self):
"""Get how many seconds have elapsed.
:return: Number of seconds elapsed
:rtype: float
"""
# NOTE: datetime.timedelta does not support nanoseconds, so keep a float here
if self._started_at is None:
raise RuntimeError("Can not get the elapsed time of a stopwatch" " if it has not been started/stopped")
if self._stopped_at is None:
now = monotonic.monotonic()
else:
now = self._stopped_at
return now - self._started_at
def __enter__(self):
"""Starts the watch."""
self.start()
return self
def __exit__(self, tp, value, traceback):
"""Stops the watch."""
self.stop()
def stop(self):
"""Stops the watch."""
if self._started_at is None:
raise RuntimeError("Can not stop a stopwatch that has not been" " started")
self._stopped_at = monotonic.monotonic()
return self
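Not part of the diff — StopWatch used as a context manager, its most common form:
# Illustrative only: the watch stops on __exit__, so elapsed() keeps returning
# the same value afterwards.
import time

with StopWatch() as sw:
    time.sleep(0.01)
print(sw.elapsed())  # ~0.01 seconds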

View File

@ -0,0 +1,62 @@
from ddtrace.vendor import wrapt
import inspect
from .deprecation import deprecated
def unwrap(obj, attr):
f = getattr(obj, attr, None)
if f and isinstance(f, wrapt.ObjectProxy) and hasattr(f, "__wrapped__"):
setattr(obj, attr, f.__wrapped__)
@deprecated("`wrapt` library is used instead", version="1.0.0")
def safe_patch(patchable, key, patch_func, service, meta, tracer):
""" takes patch_func (signature: takes the orig_method that is
wrapped in the monkey patch == UNBOUND + service and meta) and
attach the patched result to patchable at patchable.key
- If this is the module/class we can rely on methods being unbound, and just have to
update the __dict__
- If this is an instance, we have to unbind the current and rebind our
patched method
- If patchable is an instance and if we've already patched at the module/class level
then patchable[key] contains an already patched command!
To work around this, check if patchable or patchable.__class__ are ``_dogtraced``
If it isn't, there is nothing to worry about; patch the key as usual.
But if it is, search for a '__dd_orig_{key}' method on the class, which is
the original unpatched method we wish to trace.
"""
def _get_original_method(thing, key):
orig = None
if hasattr(thing, "_dogtraced"):
# Search for original method
orig = getattr(thing, "__dd_orig_{}".format(key), None)
else:
orig = getattr(thing, key)
# Set it for the next time we attempt to patch `thing`
setattr(thing, "__dd_orig_{}".format(key), orig)
return orig
if inspect.isclass(patchable) or inspect.ismodule(patchable):
orig = _get_original_method(patchable, key)
if not orig:
# Should never happen
return
elif hasattr(patchable, "__class__"):
orig = _get_original_method(patchable.__class__, key)
if not orig:
# Should never happen
return
else:
return
dest = patch_func(orig, service, meta, tracer)
if inspect.isclass(patchable) or inspect.ismodule(patchable):
setattr(patchable, key, dest)
elif hasattr(patchable, "__class__"):
setattr(patchable, key, dest.__get__(patchable, patchable.__class__))
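Not part of the diff — a sketch of `unwrap` reverting a `wrapt`-based patch; `json.dumps` stands in as an arbitrary target:
# Illustrative only: wrap_function_wrapper replaces the attribute with a
# wrapt proxy, and unwrap restores the original callable.
import json
from ddtrace.vendor import wrapt

def _noop_wrapper(wrapped, instance, args, kwargs):
    return wrapped(*args, **kwargs)

wrapt.wrap_function_wrapper("json", "dumps", _noop_wrapper)
assert isinstance(json.dumps, wrapt.ObjectProxy)

unwrap(json, "dumps")
assert not isinstance(json.dumps, wrapt.ObjectProxy)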