Move DD code into its own directory (#6)

This commit is contained in:
Diego Hurtado
2020-04-08 11:39:44 -06:00
committed by GitHub
parent 72b40ba5f9
commit 5aee3ce32e
611 changed files with 0 additions and 0 deletions

225
reference/docs/Makefile Normal file
View File

@ -0,0 +1,225 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line, e.g.:
#   make html SPHINXOPTS="-W" PAPER=a4
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
# $(PAPEROPT_$(PAPER)) is a computed-variable lookup: it expands to
# PAPEROPT_a4 or PAPEROPT_letter when PAPER is set, or to nothing otherwise.
# The trailing "." is the source directory passed to sphinx-build.
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others,
# so its options deliberately omit the -d doctree cache.
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# First rule in the file, so `make` with no arguments prints this listing.
.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  epub3      to make an epub3"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
	@echo "  dummy      to check syntax errors of document sources"
# Remove everything Sphinx generated under $(BUILDDIR).
# Guard against an empty BUILDDIR (e.g. `make clean BUILDDIR=`), which would
# otherwise expand the recipe to the catastrophic `rm -rf /*`.
.PHONY: clean
clean:
	$(if $(strip $(BUILDDIR)),,$(error BUILDDIR is empty; refusing to run rm -rf))
	rm -rf $(BUILDDIR)/*
# Build standalone HTML pages (the most common target).
.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
# Build HTML files named index.html inside per-document directories.
.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
# Build one single large HTML page.
.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
# Serialize the parsed documents as pickle files.
.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."
# Serialize the parsed documents as JSON files.
.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."
# Build HTML files plus a project file for HTML Help Workshop.
.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."
# Build HTML files plus a Qt help (qthelp) project.
.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ddtrace.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ddtrace.qhc"
# Build an Apple Help Book.
.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."
# Build HTML files plus a Devhelp project.
.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/ddtrace"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ddtrace"
	@echo "# devhelp"
# Build an epub e-book.
.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
# Build an epub3 e-book.
.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
# Generate LaTeX sources only (compile them yourself, or use `make latexpdf').
.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."
# Generate LaTeX sources, then compile them to PDF via the Makefile that
# Sphinx emits into $(BUILDDIR)/latex. $(MAKE) propagates -j/-n to the sub-make.
.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
# Generate LaTeX sources, then compile them to PDF with platex/dvipdfmx
# (Japanese-capable toolchain) via the Makefile Sphinx emits into
# $(BUILDDIR)/latex. $(MAKE) propagates -j/-n to the sub-make.
.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "platex/dvipdfmx finished; the PDF files are in $(BUILDDIR)/latex."
# Build plain-text files.
.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."
# Build manual pages.
.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
# Generate Texinfo sources only (use `make info' to compile them too).
.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."
# Generate Texinfo sources, then compile them to Info files via the Makefile
# Sphinx emits into $(BUILDDIR)/texinfo.
# Use $(MAKE) — not a literal `make` — so the jobserver, -j, and -n flags
# propagate to the sub-make, matching the latexpdf/latexpdfja targets.
.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	$(MAKE) -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
# Extract translatable messages into PO catalogs (note: uses I18NSPHINXOPTS,
# which omits the shared doctree cache).
.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
# Produce an overview of all changed/added/deprecated items.
.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."
# Check all external links for integrity.
.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."
# Run all doctests embedded in the documentation.
.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
# Run the documentation coverage check.
.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."
# Build Docutils-native XML files.
.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
# Build pseudo-XML files for display/debugging purposes.
.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
# Syntax-check the document sources without producing output files.
.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."

9
reference/docs/_templates/nav.html vendored Normal file
View File

@ -0,0 +1,9 @@
{# Vendored sidebar-navigation template: renders the document toctree, then,
   if the theme configures theme_extra_nav_links, appends those links as an
   extra list. Jinja comments produce no output, so rendering is unchanged. #}
{{ toctree(includehidden=theme_sidebar_includehidden, collapse=theme_sidebar_collapse) }}
{% if theme_extra_nav_links %}
<hr />
<ul>
{% for text, uri in theme_extra_nav_links.items() %}
<li class="toctree-l1"><a href="{{ uri }}">{{ text }}</a></li>
{% endfor %}
</ul>
{% endif %}

View File

@ -0,0 +1,634 @@
Advanced Usage
==============
Agent Configuration
-------------------
If the Datadog Agent is on a separate host from your application, you can modify
the default ``ddtrace.tracer`` object to utilize another hostname and port. Here
is a small example showcasing this::
from ddtrace import tracer
tracer.configure(hostname=<YOUR_HOST>, port=<YOUR_PORT>, https=<True/False>)
By default, these will be set to ``localhost``, ``8126``, and ``False`` respectively.
You can also use a Unix Domain Socket to connect to the agent::
from ddtrace import tracer
tracer.configure(uds_path="/path/to/socket")
Distributed Tracing
-------------------
To trace requests across hosts, the spans on the secondary hosts must be linked together by setting `trace_id`, `parent_id` and `sampling_priority`.
- On the server side, it means to read propagated attributes and set them to the active tracing context.
- On the client side, it means to propagate the attributes, commonly as a header/metadata.
`ddtrace` already provides default propagators but you can also implement your own.
Web Frameworks
^^^^^^^^^^^^^^
Some web framework integrations support distributed tracing out of the box.
Supported web frameworks:
+-------------------+---------+
| Framework/Library | Enabled |
+===================+=========+
| :ref:`aiohttp` | True |
+-------------------+---------+
| :ref:`bottle` | True |
+-------------------+---------+
| :ref:`django` | True |
+-------------------+---------+
| :ref:`falcon` | True |
+-------------------+---------+
| :ref:`flask` | True |
+-------------------+---------+
| :ref:`pyramid` | True |
+-------------------+---------+
| :ref:`requests` | True |
+-------------------+---------+
| :ref:`tornado` | True |
+-------------------+---------+
HTTP Client
^^^^^^^^^^^
For distributed tracing to work, necessary tracing information must be passed
alongside a request as it flows through the system. When the request is handled
on the other side, the metadata is retrieved and the trace can continue.
To propagate the tracing information, HTTP headers are used to transmit the
required metadata to piece together the trace.
.. autoclass:: ddtrace.propagation.http.HTTPPropagator
:members:
Custom
^^^^^^
You can manually propagate your tracing context over your RPC protocol. Here is
an example assuming that you have an `rpc.call` function that calls a `method` and
propagate a `rpc_metadata` dictionary over the wire::
# Implement your own context propagator
class MyRPCPropagator(object):
def inject(self, span_context, rpc_metadata):
rpc_metadata.update({
'trace_id': span_context.trace_id,
'span_id': span_context.span_id,
'sampling_priority': span_context.sampling_priority,
})
def extract(self, rpc_metadata):
return Context(
trace_id=rpc_metadata['trace_id'],
span_id=rpc_metadata['span_id'],
sampling_priority=rpc_metadata['sampling_priority'],
)
# On the parent side
def parent_rpc_call():
with tracer.trace("parent_span") as span:
rpc_metadata = {}
propagator = MyRPCPropagator()
propagator.inject(span.context, rpc_metadata)
method = "<my rpc method>"
rpc.call(method, rpc_metadata)
# On the child side
def child_rpc_call(method, rpc_metadata):
propagator = MyRPCPropagator()
context = propagator.extract(rpc_metadata)
tracer.context_provider.activate(context)
with tracer.trace("child_span") as span:
span.set_meta('my_rpc_method', method)
Sampling
--------
.. _`Priority Sampling`:
Priority Sampling
^^^^^^^^^^^^^^^^^
To learn about what sampling is check out our documentation `here
<https://docs.datadoghq.com/tracing/getting_further/trace_sampling_and_storage/#priority-sampling-for-distributed-tracing>`_.
By default priorities are set on a trace by a sampler. The sampler can set the
priority to the following values:
- ``AUTO_REJECT``: the sampler automatically rejects the trace
- ``AUTO_KEEP``: the sampler automatically keeps the trace
Priority sampling is enabled by default.
When enabled, the sampler will automatically assign a priority to your traces,
depending on their service and volume.
This ensures that your sampled distributed traces will be complete.
You can also set this priority manually to either drop an uninteresting trace or
to keep an important one.
To do this, set the ``context.sampling_priority`` to one of the following:
- ``USER_REJECT``: the user asked to reject the trace
- ``USER_KEEP``: the user asked to keep the trace
When not using distributed tracing, you may change the priority at any time, as
long as the trace is not finished yet.
But it has to be done before any context propagation (fork, RPC calls) to be
effective in a distributed context.
Changing the priority after context has been propagated causes different parts
of a distributed trace to use different priorities. Some parts might be kept,
some parts might be rejected, and this can cause the trace to be partially
stored and remain incomplete.
If you change the priority, we recommend you do it as soon as possible, when the
root span has just been created::
from ddtrace.ext.priority import USER_REJECT, USER_KEEP
context = tracer.context_provider.active()
# indicate to not keep the trace
context.sampling_priority = USER_REJECT
Client Sampling
^^^^^^^^^^^^^^^
Client sampling enables the sampling of traces before they are sent to the
Agent. This can provide some performance benefit as the traces will be
dropped in the client.
The ``RateSampler`` randomly samples a percentage of traces::
from ddtrace.sampler import RateSampler
# Sample rate is between 0 (nothing sampled) to 1 (everything sampled).
# Keep 20% of the traces.
sample_rate = 0.2
tracer.sampler = RateSampler(sample_rate)
Trace Search & Analytics
------------------------
Use `Trace Search & Analytics <https://docs.datadoghq.com/tracing/visualization/search/>`_ to filter application performance metrics and APM Events by user-defined tags. An APM event is generated every time a trace is generated.
Enabling APM events for all web frameworks can be accomplished by setting the environment variable ``DD_TRACE_ANALYTICS_ENABLED=true``:
* :ref:`aiohttp`
* :ref:`bottle`
* :ref:`django`
* :ref:`falcon`
* :ref:`flask`
* :ref:`molten`
* :ref:`pyramid`
* :ref:`requests`
* :ref:`tornado`
For most libraries, APM events can be enabled with the environment variable ``DD_{INTEGRATION}_ANALYTICS_ENABLED=true``:
+----------------------+----------------------------------------+
| Library | Environment Variable |
+======================+========================================+
| :ref:`aiobotocore` | ``DD_AIOBOTOCORE_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`aiopg` | ``DD_AIOPG_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`boto` | ``DD_BOTO_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`botocore` | ``DD_BOTOCORE_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`bottle` | ``DD_BOTTLE_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`cassandra` | ``DD_CASSANDRA_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`elasticsearch` | ``DD_ELASTICSEARCH_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`falcon` | ``DD_FALCON_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`flask` | ``DD_FLASK_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`flask_cache` | ``DD_FLASK_CACHE_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`grpc` | ``DD_GRPC_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`httplib` | ``DD_HTTPLIB_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`kombu` | ``DD_KOMBU_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`molten` | ``DD_MOLTEN_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`pylibmc` | ``DD_PYLIBMC_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`pymemcache` | ``DD_PYMEMCACHE_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`pymongo` | ``DD_PYMONGO_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`redis` | ``DD_REDIS_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`rediscluster` | ``DD_REDISCLUSTER_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`sqlalchemy` | ``DD_SQLALCHEMY_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
| :ref:`vertica` | ``DD_VERTICA_ANALYTICS_ENABLED`` |
+----------------------+----------------------------------------+
For datastore libraries that extend another, use the setting for the underlying library:
+------------------------+----------------------------------+
| Library | Environment Variable |
+========================+==================================+
| :ref:`mongoengine` | ``DD_PYMONGO_ANALYTICS_ENABLED`` |
+------------------------+----------------------------------+
| :ref:`mysql-connector` | ``DD_DBAPI2_ANALYTICS_ENABLED`` |
+------------------------+----------------------------------+
| :ref:`mysqldb` | ``DD_DBAPI2_ANALYTICS_ENABLED`` |
+------------------------+----------------------------------+
| :ref:`psycopg2` | ``DD_DBAPI2_ANALYTICS_ENABLED`` |
+------------------------+----------------------------------+
| :ref:`pymysql` | ``DD_DBAPI2_ANALYTICS_ENABLED`` |
+------------------------+----------------------------------+
| :ref:`sqllite` | ``DD_DBAPI2_ANALYTICS_ENABLED`` |
+------------------------+----------------------------------+
Where environment variables are not used for configuring the tracer, the instructions for configuring trace analytics are provided in the library documentation:
* :ref:`aiohttp`
* :ref:`django`
* :ref:`pyramid`
* :ref:`requests`
* :ref:`tornado`
Resolving deprecation warnings
------------------------------
Before upgrading, it's a good idea to resolve any deprecation warnings raised by your project.
These warnings must be fixed before upgrading, otherwise the ``ddtrace`` library
will not work as expected. Our deprecation messages include the version where
the behavior is altered or removed.
In Python, deprecation warnings are silenced by default. To enable them you may
add the following flag or environment variable::
$ python -Wall app.py
# or
$ PYTHONWARNINGS=all python app.py
Trace Filtering
---------------
It is possible to filter or modify traces before they are sent to the Agent by
configuring the tracer with a filters list. For instance, to filter out
all traces of incoming requests to a specific url::
Tracer.configure(settings={
'FILTERS': [
FilterRequestsOnUrl(r'http://test\.example\.com'),
],
})
All the filters in the filters list will be evaluated sequentially
for each trace and the resulting trace will either be sent to the Agent or
discarded depending on the output.
**Use the standard filters**
The library comes with a ``FilterRequestsOnUrl`` filter that can be used to
filter out incoming requests to specific urls:
.. autoclass:: ddtrace.filters.FilterRequestsOnUrl
:members:
**Write a custom filter**
Creating your own filters is as simple as implementing a class with a
``process_trace`` method and adding it to the filters parameter of
Tracer.configure. process_trace should either return a trace to be fed to the
next step of the pipeline or ``None`` if the trace should be discarded::
class FilterExample(object):
def process_trace(self, trace):
# write here your logic to return the `trace` or None;
# `trace` instance is owned by the thread and you can alter
# each single span or the whole trace if needed
# And then instantiate it with
filters = [FilterExample()]
Tracer.configure(settings={'FILTERS': filters})
(see filters.py for other example implementations)
.. _`Logs Injection`:
Logs Injection
--------------
.. automodule:: ddtrace.contrib.logging
HTTP layer
----------
Query String Tracing
^^^^^^^^^^^^^^^^^^^^
It is possible to store the query string of the URL — the part after the ``?``
in your URL — in the ``url.query.string`` tag.
Configuration can be provided both at the global level and at the integration level.
Examples::
from ddtrace import config
# Global config
config.http.trace_query_string = True
# Integration level config, e.g. 'falcon'
config.falcon.http.trace_query_string = True
.. _http-headers-tracing:
Headers tracing
^^^^^^^^^^^^^^^
For a selected set of integrations, it is possible to store http headers from both requests and responses in tags.
Configuration can be provided both at the global level and at the integration level.
Examples::
from ddtrace import config
# Global config
config.trace_headers([
'user-agent',
'transfer-encoding',
])
# Integration level config, e.g. 'falcon'
config.falcon.http.trace_headers([
'user-agent',
'some-other-header',
])
The following rules apply:
- headers configuration is based on a whitelist. If a header does not appear in the whitelist, it won't be traced.
- headers configuration is case-insensitive.
- if you configure a specific integration, e.g. 'requests', then such configuration overrides the default global
configuration, only for the specific integration.
- if you do not configure a specific integration, then the default global configuration applies, if any.
- if no configuration is provided (neither global nor integration-specific), then headers are not traced.
Once you configure your application for tracing, you will have the headers attached to the trace as tags, with a
structure like in the following example::
http {
method GET
request {
headers {
user_agent my-app/0.0.1
}
}
response {
headers {
transfer_encoding chunked
}
}
status_code 200
url https://api.github.com/events
}
.. _adv_opentracing:
OpenTracing
-----------
The Datadog opentracer can be configured via the ``config`` dictionary
parameter to the tracer which accepts the following described fields. See below
for usage.
+---------------------+----------------------------------------+---------------+
| Configuration Key | Description | Default Value |
+=====================+========================================+===============+
| `enabled` | enable or disable the tracer | `True` |
+---------------------+----------------------------------------+---------------+
| `debug` | enable debug logging | `False` |
+---------------------+----------------------------------------+---------------+
| `agent_hostname` | hostname of the Datadog agent to use | `localhost` |
+---------------------+----------------------------------------+---------------+
| `agent_https` | use https to connect to the agent | `False` |
+---------------------+----------------------------------------+---------------+
| `agent_port` | port the Datadog agent is listening on | `8126` |
+---------------------+----------------------------------------+---------------+
| `global_tags` | tags that will be applied to each span | `{}` |
+---------------------+----------------------------------------+---------------+
| `sampler` | see `Sampling`_ | `AllSampler` |
+---------------------+----------------------------------------+---------------+
| `priority_sampling` | see `Priority Sampling`_ | `True` |
+---------------------+----------------------------------------+---------------+
| `settings` | see `Advanced Usage`_ | `{}` |
+---------------------+----------------------------------------+---------------+
Usage
^^^^^
**Manual tracing**
To explicitly trace::
import time
import opentracing
from ddtrace.opentracer import Tracer, set_global_tracer
def init_tracer(service_name):
config = {
'agent_hostname': 'localhost',
'agent_port': 8126,
}
tracer = Tracer(service_name, config=config)
set_global_tracer(tracer)
return tracer
def my_operation():
span = opentracing.tracer.start_span('my_operation_name')
span.set_tag('my_interesting_tag', 'my_interesting_value')
time.sleep(0.05)
span.finish()
init_tracer('my_service_name')
my_operation()
**Context Manager Tracing**
To trace a function using the span context manager::
import time
import opentracing
from ddtrace.opentracer import Tracer, set_global_tracer
def init_tracer(service_name):
config = {
'agent_hostname': 'localhost',
'agent_port': 8126,
}
tracer = Tracer(service_name, config=config)
set_global_tracer(tracer)
return tracer
def my_operation():
with opentracing.tracer.start_span('my_operation_name') as span:
span.set_tag('my_interesting_tag', 'my_interesting_value')
time.sleep(0.05)
init_tracer('my_service_name')
my_operation()
See our tracing trace-examples_ repository for concrete, runnable examples of
the Datadog opentracer.
.. _trace-examples: https://github.com/DataDog/trace-examples/tree/master/python
See also the `Python OpenTracing`_ repository for usage of the tracer.
.. _Python OpenTracing: https://github.com/opentracing/opentracing-python
**Alongside Datadog tracer**
The Datadog OpenTracing tracer can be used alongside the Datadog tracer. This
provides the advantage of providing tracing information collected by
``ddtrace`` in addition to OpenTracing. The simplest way to do this is to use
the :ref:`ddtrace-run<ddtracerun>` command to invoke your OpenTraced
application.
**Opentracer API**
.. autoclass:: ddtrace.opentracer.Tracer
:members:
:special-members: __init__
.. _ddtracerun:
``ddtrace-run``
---------------
``ddtrace-run`` will trace :ref:`supported<Supported Libraries>` web frameworks
and database modules without the need for changing your code::
$ ddtrace-run -h
Execute the given Python program, after configuring it
to emit Datadog traces.
Append command line arguments to your program as usual.
Usage: [ENV_VARS] ddtrace-run <my_program>
The available environment variables for ``ddtrace-run`` are:
* ``DATADOG_TRACE_ENABLED=true|false`` (default: true): Enable web framework and
library instrumentation. When false, your application code will not generate
any traces.
* ``DATADOG_ENV`` (no default): Set an application's environment e.g. ``prod``,
``pre-prod``, ``stage``
* ``DATADOG_TRACE_DEBUG=true|false`` (default: false): Enable debug logging in
the tracer
* ``DATADOG_SERVICE_NAME`` (no default): override the service name to be used
for this program. This value is passed through when setting up middleware for
web framework integrations (e.g. flask, django). For tracing without a
web integration, prefer setting the service name in code.
* ``DATADOG_PATCH_MODULES=module:patch,module:patch...`` e.g.
``boto:true,redis:false``: override the modules patched for this execution of
the program (default: none)
* ``DATADOG_TRACE_AGENT_HOSTNAME=localhost``: override the address of the trace
agent host that the default tracer will attempt to submit to (default:
``localhost``)
* ``DATADOG_TRACE_AGENT_PORT=8126``: override the port that the default tracer
will submit to (default: 8126)
* ``DATADOG_PRIORITY_SAMPLING`` (default: true): enables :ref:`Priority
Sampling`
* ``DD_LOGS_INJECTION`` (default: false): enables :ref:`Logs Injection`
``ddtrace-run`` respects a variety of common entrypoints for web applications:
- ``ddtrace-run python my_app.py``
- ``ddtrace-run python manage.py runserver``
- ``ddtrace-run gunicorn myapp.wsgi:application``
- ``ddtrace-run uwsgi --http :9090 --wsgi-file my_app.py``
Pass along command-line arguments as your program would normally expect them::
$ ddtrace-run gunicorn myapp.wsgi:application --max-requests 1000 --statsd-host localhost:8125
If you're running in a Kubernetes cluster and still don't see your traces, make
sure your application has a route to the tracing Agent. An easy way to test
this is with a::
$ pip install ipython
$ DATADOG_TRACE_DEBUG=true ddtrace-run ipython
Because IPython uses SQLite, it will be automatically instrumented and your
traces should be sent off. If an error occurs, a message will be displayed in
the console, and changes can be made as needed.
API
---
``Tracer``
^^^^^^^^^^
.. autoclass:: ddtrace.Tracer
:members:
:special-members: __init__
``Span``
^^^^^^^^
.. autoclass:: ddtrace.Span
:members:
:special-members: __init__
``Pin``
^^^^^^^
.. autoclass:: ddtrace.Pin
:members:
:special-members: __init__
.. _patch_all:
``patch_all``
^^^^^^^^^^^^^
.. autofunction:: ddtrace.monkey.patch_all
``patch``
^^^^^^^^^
.. autofunction:: ddtrace.monkey.patch
.. toctree::
:maxdepth: 2

View File

@ -0,0 +1,18 @@
Asynchronous Libraries
----------------------
.. _asyncio:
asyncio
^^^^^^^
.. automodule:: ddtrace.contrib.asyncio
.. _gevent:
gevent
^^^^^^
.. automodule:: ddtrace.contrib.gevent

View File

@ -0,0 +1,107 @@
.. _`basic usage`:
Basic Usage
===========
With ``ddtrace`` installed, the application can be instrumented.
Auto Instrumentation
--------------------
``ddtrace-run``
^^^^^^^^^^^^^^^
Python applications can easily be instrumented with ``ddtrace`` by using the
included ``ddtrace-run`` command. Simply prefix your Python execution command
with ``ddtrace-run`` in order to auto-instrument the libraries in your
application.
For example, if the command to run your application is::
$ python app.py
then to auto-instrument using Datadog, the corresponding command is::
$ ddtrace-run python app.py
For more advanced usage of ``ddtrace-run`` refer to the documentation :ref:`here<ddtracerun>`.
``patch_all``
^^^^^^^^^^^^^
To manually invoke the automatic instrumentation use ``patch_all``::
from ddtrace import patch_all
patch_all()
To toggle instrumentation for a particular module::
from ddtrace import patch_all
patch_all(redis=False, cassandra=False)
By default all supported libraries will be patched when
``patch_all`` is invoked.
**Note:** To ensure that the supported libraries are instrumented properly in
the application, they must be patched *prior* to being imported. So make sure
to call ``patch_all`` *before* importing libraries that are to be instrumented.
More information about ``patch_all`` is available in our :ref:`patch_all` API
documentation.
Manual Instrumentation
----------------------
If you would like to extend the functionality of the ``ddtrace`` library or gain
finer control over instrumenting your application, several techniques are
provided by the library.
Decorator
^^^^^^^^^
``ddtrace`` provides a decorator that can be used to trace a particular method
in your application::
@tracer.wrap()
def business_logic():
"""A method that would be of interest to trace."""
# ...
# ...
API details of the decorator can be found here :py:meth:`ddtrace.Tracer.wrap`.
Context Manager
^^^^^^^^^^^^^^^
To trace an arbitrary block of code, you can use :py:meth:`ddtrace.Tracer.trace`
that returns a :py:mod:`ddtrace.Span` which can be used as a context manager::
# trace some interesting operation
with tracer.trace('interesting.operations'):
# do some interesting operation(s)
# ...
# ...
Further API details can be found here :py:meth:`ddtrace.Tracer`.
Using the API
^^^^^^^^^^^^^
If the above methods are still not enough to satisfy your tracing needs, a
manual API is provided which will allow you to start and finish spans however
you may require::
span = tracer.trace('operations.of.interest')
# do some operation(s) of interest in between
# NOTE: make sure to call span.finish() or the entire trace will not be sent
# to Datadog
span.finish()
API details of the decorator can be found here:
- :py:meth:`ddtrace.Tracer.trace`
- :py:meth:`ddtrace.Span.finish`.

347
reference/docs/conf.py Normal file
View File

@ -0,0 +1,347 @@
# -*- coding: utf-8 -*-
#
# ddtrace documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 7 17:25:05 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from datetime import datetime
# Prepend the repository root (the parent of this docs directory) to sys.path
# so that Sphinx autodoc can import the local ddtrace package being documented.
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules enabled for this build: autodoc pulls docstrings
# from the ddtrace package, extlinks provides shorthand external-link roles.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.extlinks",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
# The current year is computed at build time so the copyright range stays
# up to date without manual edits.
year = datetime.now().year
project = u"ddtrace"
copyright = u"2016-{}, Datadog, Inc.".format(year)  # noqa: A001
author = u"Datadog, Inc."
# Document class/module members in source order rather than alphabetically.
autodoc_member_order = "bysource"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = u'0.2'
# The full version, including alpha/beta/rc tags.
# release = u'0.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    "description": "Datadog's Python tracing client",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'ddtrace v0.2'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# "**" matches every generated page, so each one gets this sidebar stack,
# rendered top to bottom in the order listed.
html_sidebars = {
    "**": [
        "about.html",
        "nav.html",
        "relations.html",
        "searchbox.html",
    ],
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "ddtracedoc"
# -- Options for LaTeX output ---------------------------------------------
# Every value is left at its Sphinx default; the commented keys below show
# what can be overridden.
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    # Use the shared module-level `author` so the LaTeX title page matches the
    # man/texinfo outputs; the previous literal (u"Datadog, Inc") dropped the
    # trailing period used everywhere else.
    (master_doc, "ddtrace.tex", u"ddtrace Documentation", author, "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# Section 1 is the conventional man section for user commands.
man_pages = [(master_doc, "ddtrace", u"ddtrace Documentation", [author], 1)]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (
        master_doc,
        "ddtrace",
        u"ddtrace Documentation",
        author,
        "ddtrace",
        # Keep this one-liner in sync with html_theme_options["description"];
        # it previously still held the sphinx-quickstart placeholder
        # ("One line description of project.").
        "Datadog's Python tracing client",
        "Miscellaneous",
    ),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False

View File

@ -0,0 +1,87 @@
==============
Contributing
==============
When contributing to this repository, we advise you to discuss the change you
wish to make via an `issue <https://github.com/DataDog/dd-trace-py/issues>`_.
Branches
========
Development happens in the `master` branch. When all the features for the next
milestone are merged, the next version is released and tagged on the `master`
branch as `vVERSION`.
Your pull request should target the `master` branch.
Once a new version is released, a `release/VERSION` branch might be created to
support micro releases to `VERSION`. Patches should be cherry-picked from the
`master` branch where possible — or otherwise created from scratch.
Pull Request Process
====================
In order to be merged, a pull request needs to meet the following
conditions:
1. The test suite must pass.
2. One of the repository Members must approve the pull request.
3. Proper unit and integration testing must be implemented.
4. Proper documentation must be written.
Splitting Pull Requests
=======================
If you discussed your feature within an issue (as advised), there's a great
chance that the implementation appears doable in several steps. In order to
facilitate the review process, we strongly advise you to split your feature
implementation in small pull requests (if that is possible) so they contain a
very small number of commits (a single commit per pull request being optimal).
That ensures that:
1. Each commit passes the test suite.
2. The code reviewing process done by humans is easier as there is less code to
understand at a glance.
Internal API
============
The `ddtrace.internal` module contains code that must only be used inside
`ddtrace` itself. Relying on the API of this module is dangerous and can break
at anytime. Don't do it.
Python Versions and Implementations Support
===========================================
The following Python implementations are supported:
- CPython
Versions of those implementations that are supported are the Python versions
that are currently supported by the community.
Libraries Support
=================
Support for external libraries is implemented in submodules of the
`ddtrace.contrib` module.
Our goal is to support:
- The latest version of a library.
- All versions of a library that have been released less than 1 year ago.
Support for older versions of a library will be kept for as long as it can be
done without too much pain and while preserving backward compatibility — on a
best effort basis. Therefore, support for old versions of a library might be
dropped from the testing pipeline at anytime.
Code Style
==========
The code style is enforced by `flake8 <https://pypi.org/project/flake8>`_, its
configuration, and possibly extensions. No code style review should be done by
a human. All code style enforcement must be automatized to avoid bikeshedding
and losing time.

View File

@ -0,0 +1,177 @@
Datastore Libraries
===================
.. _algoliasearch:
Algoliasearch
-------------
.. automodule:: ddtrace.contrib.algoliasearch
.. _cassandra:
Cassandra
---------
.. automodule:: ddtrace.contrib.cassandra
.. _consul:
Consul
------
.. automodule:: ddtrace.contrib.consul
.. _dogpile.cache:
dogpile.cache
-------------
.. automodule:: ddtrace.contrib.dogpile_cache
.. _elasticsearch:
Elasticsearch
-------------
.. automodule:: ddtrace.contrib.elasticsearch
.. _flask_cache:
Flask Cache
-----------
.. automodule:: ddtrace.contrib.flask_cache
.. _mongodb:
MongoDB
-------
.. _mongoengine:
Mongoengine
^^^^^^^^^^^
.. automodule:: ddtrace.contrib.mongoengine
.. _pymongo:
Pymongo
^^^^^^^
.. automodule:: ddtrace.contrib.pymongo
Memcached
---------
.. _pylibmc:
pylibmc
^^^^^^^
.. automodule:: ddtrace.contrib.pylibmc
.. _pymemcache:
pymemcache
^^^^^^^^^^
.. automodule:: ddtrace.contrib.pymemcache
MySQL
-----
.. _mysql-connector:
mysql-connector
^^^^^^^^^^^^^^^
.. automodule:: ddtrace.contrib.mysql
.. _mysqlclient:
.. _MySQL-python:
.. _mysqldb:
mysqlclient/MySQL-python
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. automodule:: ddtrace.contrib.mysqldb
.. _pymysql:
pymysql
^^^^^^^
.. automodule:: ddtrace.contrib.pymysql
Postgres
--------
.. _aiopg:
aiopg
^^^^^
.. automodule:: ddtrace.contrib.aiopg
.. _psycopg2:
psycopg
^^^^^^^
.. automodule:: ddtrace.contrib.psycopg
Redis
-----
.. _redis:
redis
^^^^^
.. automodule:: ddtrace.contrib.redis
.. _rediscluster:
redis-py-cluster
^^^^^^^^^^^^^^^^
.. automodule:: ddtrace.contrib.rediscluster
.. _sqlalchemy:
SQLAlchemy
----------
.. automodule:: ddtrace.contrib.sqlalchemy
.. _sqllite:
SQLite
------
.. automodule:: ddtrace.contrib.sqlite3
.. _vertica:
Vertica
-------
.. automodule:: ddtrace.contrib.vertica

146
reference/docs/index.rst Normal file
View File

@ -0,0 +1,146 @@
.. include:: ./shared.rst
Datadog Python Trace Client
===========================
``ddtrace`` is Datadog's Python tracing client. It is used to trace requests as
they flow across web servers, databases and microservices. This enables
developers to have greater visibility into bottlenecks and troublesome requests
in their application.
Getting Started
---------------
For a basic product overview: check out the `setup documentation`_.
For details about developing and contributing: refer to the `development
guide`_.
For descriptions of the terminology of Datadog APM: take a look at the `official
documentation`_.
.. _`Supported Libraries`:
Supported Libraries
-------------------
We officially support Python 3.4 and above.
The versions listed are the versions that we have tested, but ``ddtrace`` can
still be compatible with other versions of these libraries. If a version of a
library you use is unsupported, feel free to contribute or request it by
contacting support.
.. |SUPPVER| replace:: Supported Version
.. |AUTO| replace:: Automatically Instrumented
+--------------------------------------------------+---------------+----------------+
| Integration | |SUPPVER| | |AUTO| [1]_ |
+==================================================+===============+================+
| :ref:`aiobotocore` | >= 0.2.3 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`aiohttp` | >= 1.2 | Yes [2]_ |
+--------------------------------------------------+---------------+----------------+
| :ref:`aiopg` | >= 0.12.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`algoliasearch` | >= 1.20.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`boto2` | >= 2.29.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`botocore` | >= 1.4.51 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`bottle` | >= 0.11 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`celery` | >= 3.1 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`cassandra` | >= 3.5 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`consul` | >= 0.7 | Yes [3]_ |
+--------------------------------------------------+---------------+----------------+
| :ref:`django` | >= 1.8 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`djangorestframework <djangorestframework>` | >= 3.4 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`elasticsearch` | >= 1.6 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`falcon` | >= 1.0 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`flask` | >= 0.10 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`flask_cache` | >= 0.12 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`gevent` | >= 1.1 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`grpc` | >= 1.8.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`jinja2` | >= 2.7 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`mako` | >= 0.1.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`kombu` | >= 4.0 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`molten` | >= 0.7.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`mongoengine` | >= 0.11 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`mysql-connector` | >= 2.1 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`MySQL-python <MySQL-python>` | >= 1.2.3 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`mysqlclient <mysqlclient>` | >= 1.3 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`psycopg2` | >= 2.4 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`pylibmc` | >= 1.4 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`pymemcache` | >= 1.3 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`pymongo` | >= 3.0 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`pyramid` | >= 1.7 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`redis` | >= 2.6 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`rediscluster` | >= 1.3.5 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`requests` | >= 2.08 | Yes |
+--------------------------------------------------+---------------+----------------+
| :ref:`sqlalchemy` | >= 1.0 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`tornado` | >= 4.0 | No |
+--------------------------------------------------+---------------+----------------+
| :ref:`vertica` | >= 0.6 | Yes |
+--------------------------------------------------+---------------+----------------+
.. [1] Libraries that are automatically instrumented when the
:ref:`ddtrace-run<ddtracerun>` command is used or the ``patch_all()`` method
is called. Always use ``patch()`` and ``patch_all()`` as soon as possible in
your Python entrypoint.
.. [2] only third-party modules such as aiohttp_jinja2
.. [3] only the synchronous client
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. toctree::
:hidden:
installation_quickstart
web_integrations
db_integrations
async_integrations
other_integrations
basic_usage
advanced_usage
contributing

View File

@ -0,0 +1,107 @@
.. include:: ./shared.rst
.. _Installation:
Installation + Quickstart
=========================
Before installing be sure to read through the `setup documentation`_ to ensure
your environment is ready to receive traces.
Installation
------------
Install with :code:`pip`::
$ pip install ddtrace
We strongly suggest pinning the version of the library you deploy.
Quickstart
----------
Getting started with ``ddtrace`` is as easy as prefixing your python
entry-point command with ``ddtrace-run``.
For example if you start your application with ``python app.py`` then run::
$ ddtrace-run python app.py
For more advanced usage of ``ddtrace-run`` refer to the documentation :ref:`here<ddtracerun>`.
To find out how to trace your own code manually refer to the documentation :ref:`here<basic usage>`.
Configuration
~~~~~~~~~~~~~
You can configure some parameters of the library by setting environment
variables before starting your application and importing the library:
.. list-table::
:header-rows: 1
:widths: 1 1 1 2
* - Configuration Variable
- Configuration Type
- Default Value
- Value Description
* - ``DD_TRACE_AGENT_URL``
- URL
- ``http://localhost:8126``
- The URL to use to connect to the Datadog agent. The URL can start with
``http://`` to connect using HTTP or with ``unix://`` to use a Unix
Domain Socket.
OpenTracing
-----------
``ddtrace`` also provides an OpenTracing API to the Datadog tracer so
that you can use the Datadog tracer in your OpenTracing-compatible
applications.
Installation
^^^^^^^^^^^^
Include OpenTracing with ``ddtrace``::
$ pip install ddtrace[opentracing]
To include the OpenTracing dependency in your project with ``ddtrace``, ensure
you have the following in ``setup.py``::
install_requires=[
"ddtrace[opentracing]",
],
Configuration
^^^^^^^^^^^^^
The OpenTracing convention for initializing a tracer is to define an
initialization method that will configure and instantiate a new tracer and
overwrite the global ``opentracing.tracer`` reference.
Typically this method looks something like::
from ddtrace.opentracer import Tracer, set_global_tracer
def init_tracer(service_name):
"""
Initialize a new Datadog opentracer and set it as the
global tracer.
This overwrites the opentracing.tracer reference.
"""
config = {
'agent_hostname': 'localhost',
'agent_port': 8126,
}
tracer = Tracer(service_name, config=config)
set_global_tracer(tracer)
return tracer
For more advanced usage of OpenTracing in ``ddtrace`` refer to the
documentation :ref:`here<adv_opentracing>`.

View File

@ -0,0 +1,91 @@
Other Libraries
===============
.. _boto:
Boto
----
.. _aiobotocore:
aiobotocore
^^^^^^^^^^^
.. automodule:: ddtrace.contrib.aiobotocore
.. _boto2:
Boto2
^^^^^
.. automodule:: ddtrace.contrib.boto
.. _botocore:
Botocore
^^^^^^^^
.. automodule:: ddtrace.contrib.botocore
.. _futures:
Futures
-------
.. automodule:: ddtrace.contrib.futures
.. _celery:
Celery
------
.. automodule:: ddtrace.contrib.celery
.. _kombu:
Kombu
------
.. automodule:: ddtrace.contrib.kombu
.. _httplib:
httplib
-------
.. automodule:: ddtrace.contrib.httplib
.. _requests:
Requests
--------
.. automodule:: ddtrace.contrib.requests
.. _grpc:
Grpc
----
.. automodule:: ddtrace.contrib.grpc
.. _jinja2:
Jinja2
------
.. automodule:: ddtrace.contrib.jinja2
.. _mako:
Mako
------
.. automodule:: ddtrace.contrib.mako

View File

@ -0,0 +1,5 @@
.. _setup documentation: https://docs.datadoghq.com/tracing/setup/python/
.. _official documentation: https://docs.datadoghq.com/tracing/visualization/
.. _development guide: https://github.com/datadog/dd-trace-py#development

View File

@ -0,0 +1,76 @@
Web Frameworks
--------------
``ddtrace`` provides tracing support for many Python web frameworks. For each
framework ``ddtrace`` supports:
- tracing of requests [*]_: trace requests through middleware and back
- distributed tracing [*]_: trace requests across application boundaries
- automatic error tagging [*]_: spans will be marked with any errors that occur
.. [*] https://docs.datadoghq.com/tracing/
.. [*] https://docs.datadoghq.com/tracing/faq/distributed-tracing/
.. [*] "erroneous HTTP return codes" are defined as being greater than 500
.. _aiohttp:
aiohttp
^^^^^^^
.. automodule:: ddtrace.contrib.aiohttp
.. _bottle:
Bottle
^^^^^^
.. automodule:: ddtrace.contrib.bottle
.. _djangorestframework:
.. _django:
Django
^^^^^^
.. automodule:: ddtrace.contrib.django
.. _falcon:
Falcon
^^^^^^
.. automodule:: ddtrace.contrib.falcon
.. _flask:
Flask
^^^^^
.. automodule:: ddtrace.contrib.flask
.. _molten:
Molten
^^^^^^
.. automodule:: ddtrace.contrib.molten
.. _pyramid:
Pyramid
^^^^^^^
.. automodule:: ddtrace.contrib.pyramid
.. _tornado:
Tornado
^^^^^^^
.. automodule:: ddtrace.contrib.tornado