Compare commits
No commits in common. "master" and "debian" have entirely different histories.
|
@ -1,4 +0,0 @@
|
|||
*.py[co]
|
||||
build
|
||||
dist
|
||||
mydatabase
|
14
.travis.yml
14
.travis.yml
|
@ -1,14 +0,0 @@
|
|||
language: python
|
||||
env:
|
||||
- DJANGO=1.6
|
||||
- DJANGO=1.7
|
||||
python:
|
||||
- "2.6"
|
||||
- "2.7"
|
||||
- "pypy"
|
||||
install:
|
||||
- pip install -r requirements.txt -r optional.txt --use-mirrors
|
||||
- pip install "Django<${DJANGO}" --use-mirrors
|
||||
script: DJANGO_SETTINGS_MODULE='django_statsd.test_settings' nosetests
|
||||
notifications:
|
||||
irc: "irc.mozilla.org#amo-bots"
|
|
@ -1,5 +0,0 @@
|
|||
BSD and MPL
|
||||
|
||||
Portions of this are from commonware:
|
||||
|
||||
https://github.com/jsocol/commonware/blob/master/LICENSE
|
|
@ -1,4 +0,0 @@
|
|||
include LICENSE.rst
|
||||
include README.rst
|
||||
recursive-include django_statsd/templates *
|
||||
recursive-include django_statsd/static *
|
18
README.rst
18
README.rst
|
@ -1,18 +0,0 @@
|
|||
============================
|
||||
Django Statsd |Build Status|
|
||||
============================
|
||||
|
||||
Documentation is on `Read the Docs <https://django-statsd.readthedocs.org/>`_.
|
||||
|
||||
-------
|
||||
License
|
||||
-------
|
||||
|
||||
BSD and MPL
|
||||
|
||||
Portions of this are from commonware:
|
||||
|
||||
https://github.com/jsocol/commonware/blob/master/LICENSE
|
||||
|
||||
.. |Build Status| image:: https://travis-ci.org/django-statsd/django-statsd.svg?branch=master
|
||||
:target: https://travis-ci.org/django-statsd/django-statsd
|
|
@ -0,0 +1,5 @@
|
|||
django-statsd-mozilla (0.3.14-0) unstable; urgency=low
|
||||
|
||||
* Initial packaging
|
||||
|
||||
-- Benjamin Dauvergne <bdauvergne@entrouvert.com> Thu, 25 Sep 2014 15:58:57 +0200
|
|
@ -0,0 +1 @@
|
|||
7
|
|
@ -0,0 +1,12 @@
|
|||
Source: django-statsd-mozilla
|
||||
Maintainer: Benjamin Dauvergne <bdauvergne@entrouvert.com>
|
||||
Section: python
|
||||
Priority: optional
|
||||
Build-Depends: python-setuptools (>= 0.6b3), python-all (>= 2.6.6-3), debhelper (>= 7)
|
||||
Standards-Version: 3.9.1
|
||||
X-Python-Version: >= 2.6
|
||||
|
||||
Package: python-django-statsd-mozilla
|
||||
Architecture: all
|
||||
Depends: ${misc:Depends}, ${python:Depends}
|
||||
Description: Statsd middleware for Django
|
|
@ -0,0 +1,6 @@
|
|||
#!/usr/bin/make -f
|
||||
|
||||
%:
|
||||
dh $@ --with python2
|
||||
|
||||
|
|
@ -0,0 +1 @@
|
|||
3.0 (quilt)
|
|
@ -0,0 +1 @@
|
|||
extend-diff-ignore="\.egg-info"
|
|
@ -1,4 +0,0 @@
|
|||
from django_statsd import patches
|
||||
from django_statsd import clients
|
||||
|
||||
from django_statsd.plugins import NoseStatsd
|
|
@ -1,59 +0,0 @@
|
|||
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
import time
|
||||
|
||||
_task_start_times = {}
|
||||
|
||||
|
||||
def on_task_sent(sender=None, task_id=None, task=None, **kwds):
|
||||
"""
|
||||
Handle Celery ``task_sent`` signals.
|
||||
"""
|
||||
# Increase statsd counter.
|
||||
statsd.incr('celery.%s.sent' % task)
|
||||
|
||||
|
||||
def on_task_prerun(sender=None, task_id=None, task=None, **kwds):
|
||||
"""
|
||||
Handle Celery ``task_prerun``signals.
|
||||
"""
|
||||
# Increase statsd counter.
|
||||
statsd.incr('celery.%s.start' % task.name)
|
||||
|
||||
# Keep track of start times. (For logging the duration in the postrun.)
|
||||
_task_start_times[task_id] = time.time()
|
||||
|
||||
|
||||
def on_task_postrun(sender=None, task_id=None, task=None, **kwds):
|
||||
"""
|
||||
Handle Celery ``task_postrun`` signals.
|
||||
"""
|
||||
# Increase statsd counter.
|
||||
statsd.incr('celery.%s.done' % task.name)
|
||||
|
||||
# Log duration.
|
||||
start_time = _task_start_times.pop(task_id, False)
|
||||
if start_time:
|
||||
ms = int((time.time() - start_time) * 1000)
|
||||
statsd.timing('celery.%s.runtime' % task.name, ms)
|
||||
|
||||
|
||||
def on_task_failure(sender=None, task_id=None, task=None, **kwds):
|
||||
"""
|
||||
Handle Celery ``task_failure`` signals.
|
||||
"""
|
||||
# Increase statsd counter.
|
||||
statsd.incr('celery.%s.failure' % task)
|
||||
|
||||
|
||||
def register_celery_events():
|
||||
try:
|
||||
from celery import signals
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
signals.task_sent.connect(on_task_sent)
|
||||
signals.task_prerun.connect(on_task_prerun)
|
||||
signals.task_postrun.connect(on_task_postrun)
|
||||
signals.task_failure.connect(on_task_failure)
|
|
@ -1,34 +0,0 @@
|
|||
import socket
|
||||
|
||||
try:
|
||||
from importlib import import_module
|
||||
except ImportError:
|
||||
from django.utils.importlib import import_module
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
_statsd = None
|
||||
|
||||
|
||||
def get(name, default):
|
||||
try:
|
||||
return getattr(settings, name, default)
|
||||
except ImportError:
|
||||
return default
|
||||
|
||||
|
||||
def get_client():
|
||||
client = get('STATSD_CLIENT', 'statsd.client')
|
||||
host = get('STATSD_HOST', 'localhost')
|
||||
# This is causing problems with statsd
|
||||
# gaierror ([Errno -9] Address family for hostname not supported)
|
||||
# TODO: figure out what to do here.
|
||||
# host = socket.gethostbyaddr(host)[2][0]
|
||||
port = get('STATSD_PORT', 8125)
|
||||
prefix = get('STATSD_PREFIX', None)
|
||||
return import_module(client).StatsClient(host=host, port=port, prefix=prefix)
|
||||
|
||||
if not _statsd:
|
||||
_statsd = get_client()
|
||||
|
||||
statsd = _statsd
|
|
@ -1,26 +0,0 @@
|
|||
import logging
|
||||
|
||||
from django_statsd.clients.null import StatsClient
|
||||
|
||||
log = logging.getLogger('statsd')
|
||||
|
||||
|
||||
class StatsClient(StatsClient):
|
||||
"""A client that sends messages to the logging framework."""
|
||||
|
||||
def timing(self, stat, delta, rate=1):
|
||||
"""Send new timing information. `delta` is in milliseconds."""
|
||||
log.info('Timing: %s, %s, %s' % (stat, delta, rate))
|
||||
|
||||
def incr(self, stat, count=1, rate=1):
|
||||
"""Increment a stat by `count`."""
|
||||
log.info('Increment: %s, %s, %s' % (stat, count, rate))
|
||||
|
||||
def decr(self, stat, count=1, rate=1):
|
||||
"""Decrement a stat by `count`."""
|
||||
log.info('Decrement: %s, %s, %s' % (stat, count, rate))
|
||||
|
||||
def gauge(self, stat, value, rate=1, delta=False):
|
||||
"""Set a gauge value."""
|
||||
log.info('Gauge: %s, %s%s, %s' % (
|
||||
stat, '' if not delta else 'diff ', value, rate))
|
|
@ -1,34 +0,0 @@
|
|||
from django_statsd.clients.null import StatsClient
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class StatsClient(StatsClient):
|
||||
"""A client that pushes messages to metlog """
|
||||
|
||||
def __init__(self, host='localhost', port=8125, prefix=None):
|
||||
super(StatsClient, self).__init__(host, port, prefix)
|
||||
if prefix is None:
|
||||
raise AttributeError(
|
||||
"Metlog needs settings.STATSD_PREFIX to be defined")
|
||||
|
||||
self._prefix = prefix
|
||||
if getattr(settings, 'METLOG', None) is None:
|
||||
raise AttributeError(
|
||||
"Metlog needs to be configured as settings.METLOG")
|
||||
|
||||
self.metlog = settings.METLOG
|
||||
|
||||
def timing(self, stat, delta, rate=1):
|
||||
"""Send new timing information. `delta` is in milliseconds."""
|
||||
stat = '%s.%s' % (self._prefix, stat)
|
||||
self.metlog.timer_send(stat, delta, rate=rate)
|
||||
|
||||
def incr(self, stat, count=1, rate=1):
|
||||
"""Increment a stat by `count`."""
|
||||
stat = '%s.%s' % (self._prefix, stat)
|
||||
self.metlog.incr(stat, count, rate=rate)
|
||||
|
||||
def decr(self, stat, count=1, rate=1):
|
||||
"""Decrement a stat by `count`."""
|
||||
stat = '%s.%s' % (self._prefix, stat)
|
||||
self.metlog.incr(stat, -count, rate=rate)
|
|
@ -1 +0,0 @@
|
|||
from statsd.client import StatsClient
|
|
@ -1,2 +0,0 @@
|
|||
# This is just a place holder, the toolbar works well enough for now.
|
||||
from django_statsd.clients.toolbar import StatsClient
|
|
@ -1,8 +0,0 @@
|
|||
from statsd.client import StatsClient
|
||||
|
||||
|
||||
class StatsClient(StatsClient):
|
||||
"""A null client that does nothing."""
|
||||
|
||||
def _after(self, data):
|
||||
pass
|
|
@ -1,44 +0,0 @@
|
|||
from collections import defaultdict
|
||||
from time import time
|
||||
|
||||
from django_statsd.clients.null import StatsClient
|
||||
|
||||
|
||||
class StatsClient(StatsClient):
|
||||
"""A client that pushes things into a local cache."""
|
||||
|
||||
def __init__(self, *args, **kw):
|
||||
super(StatsClient, self).__init__(*args, **kw)
|
||||
self.reset()
|
||||
|
||||
def reset(self):
|
||||
self.cache = defaultdict(list)
|
||||
self.timings = []
|
||||
|
||||
def timing(self, stat, delta, rate=1):
|
||||
"""Send new timing information. `delta` is in milliseconds."""
|
||||
stat = '%s|timing' % stat
|
||||
now = time() * 1000
|
||||
self.timings.append([stat, now - delta, delta, now])
|
||||
|
||||
def incr(self, stat, count=1, rate=1):
|
||||
"""Increment a stat by `count`."""
|
||||
stat = '%s|count' % stat
|
||||
self.cache[stat].append([count, rate])
|
||||
|
||||
def decr(self, stat, count=1, rate=1):
|
||||
"""Decrement a stat by `count`."""
|
||||
stat = '%s|count' % stat
|
||||
self.cache[stat].append([-count, rate])
|
||||
|
||||
def gauge(self, stat, value, rate=1, delta=False):
|
||||
"""Set a gauge value."""
|
||||
stat = '%s|gauge' % stat
|
||||
if delta:
|
||||
self.cache[stat].append([value, rate])
|
||||
else:
|
||||
self.cache[stat] = [[value, rate]]
|
||||
|
||||
def set(self, stat, value, rate=1):
|
||||
stat = '%s|set' % stat
|
||||
self.cache[stat].append([value, rate])
|
|
@ -1,13 +0,0 @@
|
|||
import logging
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
|
||||
class StatsdHandler(logging.Handler):
|
||||
"""Send error to statsd"""
|
||||
|
||||
def emit(self, record):
|
||||
if not record.exc_info:
|
||||
return
|
||||
|
||||
statsd.incr('error.%s' % record.exc_info[0].__name__.lower())
|
|
@ -1,23 +0,0 @@
|
|||
from optparse import make_option
|
||||
import time
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = """
|
||||
Send a ping to statsd, this is suitable for using as a line in graphite
|
||||
charts, for example:
|
||||
http://codeascraft.etsy.com/2010/12/08/track-every-release/
|
||||
|
||||
`key`: key.to.ping.with
|
||||
"""
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--key', action='store', type='string',
|
||||
dest='key', help='Key to ping'),
|
||||
)
|
||||
|
||||
def handle(self, *args, **kw):
|
||||
statsd.timing(kw.get('key'), time.time())
|
|
@ -1,74 +0,0 @@
|
|||
import inspect
|
||||
import time
|
||||
|
||||
from django.conf import settings
|
||||
from django.http import Http404
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
|
||||
try:
|
||||
from django.utils.deprecation import MiddlewareMixin
|
||||
except ImportError:
|
||||
class MiddlewareMixin(object):
|
||||
pass
|
||||
|
||||
|
||||
class GraphiteMiddleware(MiddlewareMixin):
|
||||
|
||||
def process_response(self, request, response):
|
||||
statsd.incr('response.%s' % response.status_code)
|
||||
if hasattr(request, 'user') and request.user.is_authenticated():
|
||||
statsd.incr('response.auth.%s' % response.status_code)
|
||||
return response
|
||||
|
||||
def process_exception(self, request, exception):
|
||||
if not isinstance(exception, Http404):
|
||||
statsd.incr('response.500')
|
||||
if hasattr(request, 'user') and request.user.is_authenticated():
|
||||
statsd.incr('response.auth.500')
|
||||
|
||||
|
||||
class GraphiteRequestTimingMiddleware(MiddlewareMixin):
|
||||
"""statsd's timing data per view."""
|
||||
|
||||
def process_view(self, request, view_func, view_args, view_kwargs):
|
||||
view = view_func
|
||||
if not inspect.isfunction(view_func):
|
||||
view = view.__class__
|
||||
try:
|
||||
request._view_module = view.__module__
|
||||
request._view_name = view.__name__
|
||||
request._start_time = time.time()
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
def process_response(self, request, response):
|
||||
self._record_time(request)
|
||||
return response
|
||||
|
||||
def process_exception(self, request, exception):
|
||||
self._record_time(request)
|
||||
|
||||
def _record_time(self, request):
|
||||
if hasattr(request, '_start_time'):
|
||||
ms = int((time.time() - request._start_time) * 1000)
|
||||
data = dict(module=request._view_module, name=request._view_name,
|
||||
method=request.method)
|
||||
statsd.timing('view.{module}.{name}.{method}'.format(**data), ms)
|
||||
if getattr(settings, 'STATSD_VIEW_TIMER_DETAILS', True):
|
||||
statsd.timing('view.{module}.{method}'.format(**data), ms)
|
||||
statsd.timing('view.{method}'.format(**data), ms)
|
||||
|
||||
|
||||
class TastyPieRequestTimingMiddleware(GraphiteRequestTimingMiddleware):
|
||||
"""statd's timing specific to Tastypie."""
|
||||
|
||||
def process_view(self, request, view_func, view_args, view_kwargs):
|
||||
try:
|
||||
request._view_module = view_kwargs['api_name']
|
||||
request._view_name = view_kwargs['resource_name']
|
||||
request._start_time = time.time()
|
||||
except (AttributeError, KeyError):
|
||||
super(TastyPieRequestTimingMiddleware, self).process_view(
|
||||
request, view_func, view_args, view_kwargs)
|
|
@ -1,40 +0,0 @@
|
|||
from django.conf import settings
|
||||
from django.db.models.signals import post_save, post_delete
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
from .celery import register_celery_events
|
||||
|
||||
|
||||
if getattr(settings, 'STATSD_CELERY_SIGNALS', False):
|
||||
register_celery_events()
|
||||
|
||||
|
||||
def model_save(sender, **kwargs):
|
||||
"""
|
||||
Handle ``save`` events of all Django models.
|
||||
"""
|
||||
instance = kwargs.get('instance')
|
||||
|
||||
# Increase statsd counter.
|
||||
statsd.incr('models.%s.%s.%s' % (
|
||||
instance._meta.app_label,
|
||||
instance._meta.object_name,
|
||||
'create' if kwargs.get('created', False) else 'update',
|
||||
))
|
||||
|
||||
|
||||
def model_delete(sender, **kwargs):
|
||||
"""
|
||||
Handle ``delete`` events of all Django models.
|
||||
"""
|
||||
instance = kwargs.get('instance')
|
||||
|
||||
# Increase statsd counter.
|
||||
statsd.incr('models.%s.%s.delete' % (
|
||||
instance._meta.app_label,
|
||||
instance._meta.object_name,
|
||||
))
|
||||
|
||||
if getattr(settings, 'STATSD_MODEL_SIGNALS', False):
|
||||
post_save.connect(model_save)
|
||||
post_delete.connect(model_delete)
|
|
@ -1,112 +0,0 @@
|
|||
from collections import defaultdict
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.translation import ugettext_lazy as _, ungettext
|
||||
|
||||
from debug_toolbar.panels import Panel
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
|
||||
def munge(stats):
|
||||
# Munge the stats back into something easy for a template.
|
||||
results = []
|
||||
for stat in sorted(stats.keys()):
|
||||
values = stats[stat]
|
||||
name, type_ = stat.split('|')
|
||||
total = sum([x * y for x, y in values])
|
||||
data = {'name': name, 'type': type_,
|
||||
'count': len(values),
|
||||
'total': total,
|
||||
'values': values}
|
||||
results.append(data)
|
||||
return results
|
||||
|
||||
|
||||
def times(stats):
|
||||
results = []
|
||||
if not stats:
|
||||
return results
|
||||
|
||||
all_start = stats[0][1]
|
||||
all_end = max([t[3] for t in stats])
|
||||
all_duration = all_end - all_start
|
||||
for stat, start, duration, end in stats:
|
||||
start_rel = (start - all_start)
|
||||
start_ratio = (start_rel / float(all_duration))
|
||||
duration_ratio = (duration / float(all_duration))
|
||||
try:
|
||||
duration_ratio_relative = duration_ratio / (1.0 - start_ratio)
|
||||
except ZeroDivisionError:
|
||||
duration_ratio_relative = 0
|
||||
results.append([stat.split('|')[0],
|
||||
# % start from left.
|
||||
start_ratio * 100.0,
|
||||
# % width
|
||||
duration_ratio_relative * 100.0,
|
||||
duration,
|
||||
])
|
||||
results.sort(key=lambda r: r[1])
|
||||
return results
|
||||
|
||||
|
||||
def times_summary(stats):
|
||||
results = []
|
||||
if not stats:
|
||||
return results
|
||||
|
||||
timings = defaultdict(list)
|
||||
for stat in stats:
|
||||
timings[stat[0].split('|')[0]].append(stat[2])
|
||||
|
||||
for stat, v in timings.items():
|
||||
if not v:
|
||||
continue
|
||||
v.sort()
|
||||
count = len(v)
|
||||
vmin, vmax = v[0], v[-1]
|
||||
vsum = sum(v)
|
||||
mean = vsum / float(count)
|
||||
results.append({
|
||||
'stat': stat,
|
||||
'count': count,
|
||||
'sum': vsum,
|
||||
'lower': vmin,
|
||||
'upper': vmax,
|
||||
'mean': mean,
|
||||
})
|
||||
return results
|
||||
|
||||
|
||||
class StatsdPanel(Panel):
|
||||
|
||||
title = _('Statsd')
|
||||
has_content = True
|
||||
|
||||
template = 'toolbar_statsd/statsd.html'
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(StatsdPanel, self).__init__(*args, **kwargs)
|
||||
self.statsd = statsd
|
||||
try:
|
||||
self.statsd.reset()
|
||||
except AttributeError:
|
||||
raise ValueError('To use the toolbar, your STATSD_CLIENT must'
|
||||
'be set to django_statsd.clients.toolbar')
|
||||
|
||||
@property
|
||||
def nav_subtitle(self):
|
||||
length = len(self.statsd.cache) + len(self.statsd.timings)
|
||||
return ungettext('%s record', '%s records', length) % length
|
||||
|
||||
def process_response(self, request, response):
|
||||
config = getattr(settings, 'TOOLBAR_STATSD', {})
|
||||
if 'roots' in config:
|
||||
for key in ['timers', 'counts']:
|
||||
self.record_stats({key: config['roots'][key]})
|
||||
|
||||
self.record_stats({
|
||||
'graphite': config.get('graphite'),
|
||||
'statsd': munge(self.statsd.cache),
|
||||
'timings': times(self.statsd.timings),
|
||||
'timings_summary': times_summary(self.statsd.timings),
|
||||
})
|
|
@ -1,11 +0,0 @@
|
|||
from django.conf import settings
|
||||
|
||||
try:
|
||||
from importlib import import_module
|
||||
except ImportError:
|
||||
from django.utils.importlib import import_module
|
||||
|
||||
patches = getattr(settings, 'STATSD_PATCHES', [])
|
||||
|
||||
for patch in patches:
|
||||
import_module(patch).patch()
|
|
@ -1,23 +0,0 @@
|
|||
from django.core import cache
|
||||
from django.core.cache.backends.base import BaseCache
|
||||
|
||||
from django_statsd.patches.utils import wrap
|
||||
|
||||
|
||||
def key(cache, attr):
|
||||
return 'cache.%s.%s' % (cache.__module__.split('.')[-1], attr)
|
||||
|
||||
|
||||
class StatsdTracker(BaseCache):
|
||||
|
||||
def __init__(self, cache):
|
||||
self.cache = cache
|
||||
|
||||
def __getattribute__(self, attr):
|
||||
if attr == 'cache':
|
||||
return BaseCache.__getattribute__(self, attr)
|
||||
return wrap(getattr(self.cache, attr), key(self.cache, attr))
|
||||
|
||||
|
||||
def patch():
|
||||
cache.cache = StatsdTracker(cache.cache)
|
|
@ -1,69 +0,0 @@
|
|||
import django
|
||||
try:
|
||||
from django.db.backends import utils as util
|
||||
except ImportError:
|
||||
from django.db.backends import util
|
||||
|
||||
from django_statsd.patches.utils import wrap, patch_method
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
|
||||
def key(db, attr):
|
||||
return 'db.%s.%s.%s' % (db.client.executable_name, db.alias, attr)
|
||||
|
||||
|
||||
def pre_django_1_6_cursorwrapper_getattr(self, attr):
|
||||
"""
|
||||
The CursorWrapper is a pretty small wrapper around the cursor.
|
||||
If you are NOT in debug mode, this is the wrapper that's used.
|
||||
Sadly if it's in debug mode, we get a different wrapper.
|
||||
"""
|
||||
if self.db.is_managed():
|
||||
self.db.set_dirty()
|
||||
if attr in self.__dict__:
|
||||
return self.__dict__[attr]
|
||||
else:
|
||||
if attr in ['execute', 'executemany', 'callproc']:
|
||||
return wrap(getattr(self.cursor, attr), key(self.db, attr))
|
||||
return getattr(self.cursor, attr)
|
||||
|
||||
|
||||
def _get_query_type(query):
|
||||
return (query.split(None, 1) or ['__empty__'])[0].lower()
|
||||
|
||||
|
||||
def patched_execute(orig_execute, self, query, *args, **kwargs):
|
||||
with statsd.timer(key(self.db, 'execute.%s' % _get_query_type(query))):
|
||||
return orig_execute(self, query, *args, **kwargs)
|
||||
|
||||
|
||||
def patched_executemany(orig_executemany, self, query, *args, **kwargs):
|
||||
with statsd.timer(key(self.db, 'executemany.%s' % _get_query_type(query))):
|
||||
return orig_executemany(self, query, *args, **kwargs)
|
||||
|
||||
|
||||
def patched_callproc(orig_callproc, self, query, *args, **kwargs):
|
||||
with statsd.timer(key(self.db, 'callproc.%s' % _get_query_type(query))):
|
||||
return orig_callproc(self, query, *args, **kwargs)
|
||||
|
||||
|
||||
def patch():
|
||||
"""
|
||||
The CursorWrapper is a pretty small wrapper around the cursor. If
|
||||
you are NOT in debug mode, this is the wrapper that's used. Sadly
|
||||
if it's in debug mode, we get a different wrapper for version
|
||||
earlier than 1.6.
|
||||
"""
|
||||
|
||||
if django.VERSION > (1, 6):
|
||||
# In 1.6+ util.CursorDebugWrapper just makes calls to CursorWrapper
|
||||
# As such, we only need to instrument CursorWrapper.
|
||||
# Instrumenting both will result in duplicated metrics
|
||||
patch_method(util.CursorWrapper, 'execute')(patched_execute)
|
||||
patch_method(util.CursorWrapper, 'executemany')(patched_executemany)
|
||||
patch_method(util.CursorWrapper, 'callproc')(patched_callproc)
|
||||
else:
|
||||
util.CursorWrapper.__getattr__ = pre_django_1_6_cursorwrapper_getattr
|
||||
patch_method(util.CursorDebugWrapper, 'execute')(patched_execute)
|
||||
patch_method(
|
||||
util.CursorDebugWrapper, 'executemany')(patched_executemany)
|
|
@ -1,26 +0,0 @@
|
|||
from django_statsd.clients import statsd
|
||||
from functools import partial, wraps
|
||||
|
||||
|
||||
def patch_method(target, name, external_decorator=None):
|
||||
|
||||
def decorator(patch_function):
|
||||
original_function = getattr(target, name)
|
||||
|
||||
@wraps(patch_function)
|
||||
def wrapper(*args, **kw):
|
||||
return patch_function(original_function, *args, **kw)
|
||||
|
||||
setattr(target, name, wrapper)
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def wrapped(method, key, *args, **kw):
|
||||
with statsd.timer(key):
|
||||
return method(*args, **kw)
|
||||
|
||||
|
||||
def wrap(method, key, *args, **kw):
|
||||
return partial(wrapped, method, key, *args, **kw)
|
|
@ -1,81 +0,0 @@
|
|||
import logging
|
||||
import os
|
||||
|
||||
NOSE = False
|
||||
try:
|
||||
from nose.plugins.base import Plugin
|
||||
NOSE = True
|
||||
except ImportError:
|
||||
class Plugin:
|
||||
pass
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class NoseStatsd(Plugin):
|
||||
name = 'statsd'
|
||||
|
||||
def options(self, parse, env=os.environ):
|
||||
super(NoseStatsd, self).options(parse, env=env)
|
||||
|
||||
def configure(self, options, conf):
|
||||
super(NoseStatsd, self).configure(options, conf)
|
||||
|
||||
def report(self, stream):
|
||||
def write(line):
|
||||
stream.writeln('%s' % line)
|
||||
|
||||
if not hasattr(statsd, 'timings'):
|
||||
write("Statsd timings not saved, ensure your statsd client is: "
|
||||
"STATSD_CLIENT = 'django_statsd.clients.nose'")
|
||||
return
|
||||
|
||||
timings = {}
|
||||
longest = 0
|
||||
for v in statsd.timings:
|
||||
k = v[0].split('|')[0]
|
||||
longest = max(longest, len(k))
|
||||
timings.setdefault(k, [])
|
||||
timings[k].append(v[2])
|
||||
|
||||
counts = {}
|
||||
for k, v in list(statsd.cache.items()):
|
||||
k = k.split('|')[0]
|
||||
longest = max(longest, len(k))
|
||||
counts.setdefault(k, [])
|
||||
[counts[k].append(_v) for _v in v]
|
||||
|
||||
header = '%s | Number | Avg (ms) | Total (ms)' % (
|
||||
'Statsd Keys'.ljust(longest))
|
||||
header_len = len(header)
|
||||
|
||||
write('')
|
||||
write('=' * header_len)
|
||||
write('%s | Number | Avg (ms) | Total (ms)' % (
|
||||
'Statsd Keys'.ljust(longest)))
|
||||
write('-' * header_len)
|
||||
if not timings:
|
||||
write('None')
|
||||
|
||||
for k in sorted(timings.keys()):
|
||||
v = timings[k]
|
||||
write('%s | %s | %s | %s' % (
|
||||
k.ljust(longest),
|
||||
str(len(v)).rjust(6),
|
||||
('%0.5f' % (sum(v) / float(len(v)))).rjust(10),
|
||||
('%0.3f' % sum(v)).rjust(10)))
|
||||
|
||||
write('=' * header_len)
|
||||
write('%s | Number | Total' % ('Statsd Counts'.ljust(longest)))
|
||||
write('-' * header_len)
|
||||
if not counts:
|
||||
write('None')
|
||||
|
||||
for k in sorted(counts.keys()):
|
||||
v = counts[k]
|
||||
write('%s | %s | %d' % (
|
||||
k.ljust(longest),
|
||||
str(len(v)).rjust(6),
|
||||
sum([x * y for x, y in v])))
|
|
@ -1,32 +0,0 @@
|
|||
(function(exports) {
|
||||
"use strict";
|
||||
/*
|
||||
* A simpler boomerang: https://github.com/yahoo/boomerang that just
|
||||
* does navigation timing. Requires jquery.
|
||||
*/
|
||||
|
||||
exports.send = function(url) {
|
||||
/* Sends the timing data to the given URL */
|
||||
var perf = window.performance || window.msPerformance ||
|
||||
window.webkitPerformance || window.mozPerformance;
|
||||
if (perf) {
|
||||
setTimeout(function() {
|
||||
$.post(url, {
|
||||
'window.performance.timing.navigationStart': perf.timing.navigationStart,
|
||||
'window.performance.timing.domComplete': perf.timing.domComplete,
|
||||
'window.performance.timing.domInteractive': perf.timing.domInteractive,
|
||||
'window.performance.timing.domLoading': perf.timing.domLoading,
|
||||
'window.performance.timing.loadEventEnd': perf.timing.loadEventEnd,
|
||||
'window.performance.timing.responseStart': perf.timing.responseStart,
|
||||
'window.performance.navigation.redirectCount': perf.navigation.redirectCount,
|
||||
'window.performance.navigation.type': perf.navigation.type,
|
||||
'client': 'stick'
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
};
|
||||
|
||||
})(typeof exports === 'undefined' ? (this.stick = {}) : exports);
|
||||
|
||||
|
||||
|
|
@ -1,125 +0,0 @@
|
|||
<section id="statsd"
|
||||
data-graphite="{{ graphite }}"
|
||||
data-roots-timers="{% for root in timers %}{{ root }}{% if not forloop.last %}|{% endif %}{% endfor %}"
|
||||
data-roots-counts="{% for root in counts %}{{ root }}{% if not forloop.last %}|{% endif %}{% endfor %}">
|
||||
<table id="timings-summary">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Stat</th>
|
||||
<th>Count</th>
|
||||
<th>Sum</th>
|
||||
<th>Lower</th>
|
||||
<th>Mean</th>
|
||||
<th>Upper</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for value in timings_summary %}
|
||||
<tr>
|
||||
<td><a href="#" class="statsd" data-key="{{ value.stat }}" data-type="timing">{{ value.stat }}</a></td>
|
||||
<td>{{ value.count }}</td>
|
||||
<td>{{ value.sum }}</td>
|
||||
<td>{{ value.lower }}</td>
|
||||
<td>{{ value.mean|floatformat:"2" }}</td>
|
||||
<td>{{ value.upper }}</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<table id="timings" style="display: table">
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Stat</th>
|
||||
<th>Time (ms)</th>
|
||||
<th class="timeline" style="width: 99%">Timing</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for value in timings %}
|
||||
<tr>
|
||||
<td><a href="#" class="statsd" data-key="{{ value.0 }}" data-type="timing">{{ value.0 }}</a></td>
|
||||
<td>{{ value.3 }}</td>
|
||||
<td class="timeline">
|
||||
<div class="djDebugTimeline">
|
||||
<div class="djDebugLineChart djDebugLineChartSlave">
|
||||
<strong style="background: lightgrey; width: 0"> </strong>
|
||||
</div>
|
||||
<div class="djDebugLineChart djDebugLineChartActual" style="left: {{ value.1 }}%;">
|
||||
<strong style="width: {{ value.2 }}%; background: grey;"> </strong>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Stat</th>
|
||||
<th>Total</th>
|
||||
<th>Count</th>
|
||||
<th>Values</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{% for record in statsd %}
|
||||
<tr>
|
||||
<td><a href="#" class="statsd" data-key="{{ record.name }}" data-type="{{ record.type }}">{{ record.name }}</a></td>
|
||||
<td>{{ record.total }}{% if record.type == 'timing' %}ms{% endif %}</td>
|
||||
<td>{{ record.count }}</td>
|
||||
<td>
|
||||
{% if record.count > 1 %}
|
||||
<span class="values" data-count="{{ record.count }}">
|
||||
{% for value in record.values %}
|
||||
{{ value.0 }}{% if record.type == 'timing' %}ms{% endif %}|{{ value.1 }}{% if not forloop.last %},{% endif %}
|
||||
{% endfor %}
|
||||
</span>
|
||||
{% endif %}
|
||||
</td>
|
||||
</tr>
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
</section>
|
||||
|
||||
<div id="graphs" img="graphite?width=586&height=308&target=root.key&target=root.key.lower&target=root.key.mean&target=root.key.upper_90&target=scale(root.key.count,0.1)&from=-24hours&title=24 hours"></div>
|
||||
|
||||
<script type="text/javascript">
|
||||
// TODO: inlining is bad, this should be external.
|
||||
|
||||
$(document).ready(function() {
|
||||
var graphite = $('#statsd').attr('data-graphite'),
|
||||
timers = $('#statsd').attr('data-roots-timers').split('|'),
|
||||
counts = $('#statsd').attr('data-roots-counts').split('|'),
|
||||
target = $('#graphs'),
|
||||
img = target.attr('img');
|
||||
|
||||
$('a.statsd').click(function() {
|
||||
var that = $(this),
|
||||
roots = timers;
|
||||
if (that.attr("data-type") == "count") {
|
||||
roots = counts;
|
||||
}
|
||||
target.html('');
|
||||
$.each(roots, function(root) {
|
||||
var custom = img.replace('graphite', graphite, 'g')
|
||||
.replace('root', roots[root], 'g')
|
||||
.replace('key', that.attr('data-key'), 'g');
|
||||
target.append('<p><b>' + roots[root] + '.' + that.attr('data-key') + '</b></p><img src="' + custom + '">');
|
||||
console.log(custom);
|
||||
})
|
||||
});
|
||||
|
||||
$('#djDebugStatsdPanel #timings td').click(function() {
|
||||
var currentRow = $(this).parent(),
|
||||
currentLine = currentRow.find('.djDebugLineChartActual'),
|
||||
currentBar = currentLine.children('strong'),
|
||||
table = currentLine.closest('table'),
|
||||
barLeft = currentLine.position().left,
|
||||
barWidth = currentBar.width();
|
||||
table.find('.djDebugLineChartSlave').css('left', barLeft).find('strong').css('width', barWidth);
|
||||
});
|
||||
})
|
||||
</script>
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
DATABASES = {
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': 'mydatabase'
|
||||
}
|
||||
}
|
||||
|
||||
ROOT_URLCONF = ''
|
||||
STATSD_CLIENT = 'django_statsd.clients.null'
|
||||
STATSD_PREFIX = None
|
||||
METLOG = None
|
||||
|
||||
SECRET_KEY = 'secret'
|
|
@ -1,579 +0,0 @@
|
|||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from django.conf import settings
|
||||
from nose.exc import SkipTest
|
||||
from nose import tools as nose_tools
|
||||
from unittest2 import skipUnless
|
||||
|
||||
from django import VERSION
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.http import HttpResponse, HttpResponseForbidden
|
||||
from django.test import TestCase
|
||||
from django.test.client import RequestFactory
|
||||
from django.utils import dictconfig
|
||||
from django.utils import unittest
|
||||
|
||||
import mock
|
||||
from nose.tools import eq_
|
||||
from django_statsd.clients import get_client, statsd
|
||||
from django_statsd.patches import utils
|
||||
from django_statsd.patches.db import (
|
||||
patched_callproc,
|
||||
patched_execute,
|
||||
patched_executemany,
|
||||
)
|
||||
from django_statsd import middleware
|
||||
|
||||
cfg = {
|
||||
'version': 1,
|
||||
'formatters': {},
|
||||
'handlers': {
|
||||
'test_statsd_handler': {
|
||||
'class': 'django_statsd.loggers.errors.StatsdHandler',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'test.logging': {
|
||||
'handlers': ['test_statsd_handler'],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@mock.patch.object(middleware.statsd, 'incr')
|
||||
class TestIncr(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.req = RequestFactory().get('/')
|
||||
self.res = HttpResponse()
|
||||
|
||||
def test_graphite_response(self, incr):
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_response(self.req, self.res)
|
||||
assert incr.called
|
||||
|
||||
def test_graphite_response_authenticated(self, incr):
|
||||
self.req.user = mock.Mock()
|
||||
self.req.user.is_authenticated.return_value = True
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(incr.call_count, 2)
|
||||
|
||||
def test_graphite_exception(self, incr):
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_exception(self.req, None)
|
||||
assert incr.called
|
||||
|
||||
def test_graphite_exception_authenticated(self, incr):
|
||||
self.req.user = mock.Mock()
|
||||
self.req.user.is_authenticated.return_value = True
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_exception(self.req, None)
|
||||
eq_(incr.call_count, 2)
|
||||
|
||||
|
||||
@mock.patch.object(middleware.statsd, 'timing')
|
||||
class TestTiming(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.req = RequestFactory().get('/')
|
||||
self.res = HttpResponse()
|
||||
|
||||
def test_request_timing(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.GraphiteRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), dict())
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.%s.%s.GET' % (func.__module__, func.__name__),
|
||||
'view.%s.GET' % func.__module__,
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
def test_request_timing_exception(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.GraphiteRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), dict())
|
||||
gmw.process_exception(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.%s.%s.GET' % (func.__module__, func.__name__),
|
||||
'view.%s.GET' % func.__module__,
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
def test_request_timing_tastypie(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.TastyPieRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), {
|
||||
'api_name': 'my_api_name',
|
||||
'resource_name': 'my_resource_name'
|
||||
})
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.my_api_name.my_resource_name.GET',
|
||||
'view.my_api_name.GET',
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
def test_request_timing_tastypie_fallback(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.TastyPieRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), dict())
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.%s.%s.GET' % (func.__module__, func.__name__),
|
||||
'view.%s.GET' % func.__module__,
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
|
||||
class TestClient(unittest.TestCase):
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT', 'statsd.client')
|
||||
def test_normal(self):
|
||||
eq_(get_client().__module__, 'statsd.client')
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT',
|
||||
'django_statsd.clients.null')
|
||||
def test_null(self):
|
||||
eq_(get_client().__module__, 'django_statsd.clients.null')
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT',
|
||||
'django_statsd.clients.toolbar')
|
||||
def test_toolbar(self):
|
||||
eq_(get_client().__module__, 'django_statsd.clients.toolbar')
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT',
|
||||
'django_statsd.clients.toolbar')
|
||||
def test_toolbar_send(self):
|
||||
client = get_client()
|
||||
eq_(client.cache, {})
|
||||
client.incr('testing')
|
||||
eq_(client.cache, {'testing|count': [[1, 1]]})
|
||||
|
||||
|
||||
class TestMetlogClient(TestCase):
|
||||
|
||||
def check_metlog(self):
|
||||
try:
|
||||
from metlog.config import client_from_dict_config
|
||||
return client_from_dict_config
|
||||
except ImportError:
|
||||
raise SkipTest("Metlog is not installed")
|
||||
|
||||
@nose_tools.raises(AttributeError)
|
||||
def test_no_metlog(self):
|
||||
with self.settings(STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
get_client()
|
||||
|
||||
def _create_client(self):
|
||||
client_from_dict_config = self.check_metlog()
|
||||
|
||||
# Need to load within the test in case metlog is not installed
|
||||
from metlog.config import client_from_dict_config
|
||||
|
||||
METLOG_CONF = {
|
||||
'logger': 'django-statsd',
|
||||
'sender': {
|
||||
'class': 'metlog.senders.DebugCaptureSender',
|
||||
},
|
||||
}
|
||||
|
||||
return client_from_dict_config(METLOG_CONF)
|
||||
|
||||
def test_get_client(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(client.__module__, 'django_statsd.clients.moz_metlog')
|
||||
|
||||
def test_metlog_incr(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
client.incr('testing')
|
||||
eq_(len(client.metlog.sender.msgs), 1)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '1')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'moz_metlog.testing')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
def test_metlog_decr(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
client.decr('testing')
|
||||
eq_(len(client.metlog.sender.msgs), 1)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '-1')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'moz_metlog.testing')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
def test_metlog_timing(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
client.timing('testing', 512, rate=2)
|
||||
eq_(len(client.metlog.sender.msgs), 1)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '512')
|
||||
eq_(msg['fields']['rate'], 2)
|
||||
eq_(msg['fields']['name'], 'moz_metlog.testing')
|
||||
eq_(msg['type'], 'timer')
|
||||
|
||||
@nose_tools.raises(AttributeError)
|
||||
def test_metlog_no_prefixes(self):
|
||||
metlog = self._create_client()
|
||||
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
client.incr('foo', 2)
|
||||
|
||||
def test_metlog_prefixes(self):
|
||||
metlog = self._create_client()
|
||||
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='some_prefix',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
|
||||
client.timing('testing', 512, rate=2)
|
||||
client.incr('foo', 2)
|
||||
client.decr('bar', 5)
|
||||
|
||||
eq_(len(client.metlog.sender.msgs), 3)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '512')
|
||||
eq_(msg['fields']['rate'], 2)
|
||||
eq_(msg['fields']['name'], 'some_prefix.testing')
|
||||
eq_(msg['type'], 'timer')
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[1])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '2')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'some_prefix.foo')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[2])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '-5')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'some_prefix.bar')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
|
||||
# This is primarily for Zamboni, which loads in the custom middleware
|
||||
# classes, one of which, breaks posts to our url. Let's stop that.
|
||||
@mock.patch.object(settings, 'MIDDLEWARE_CLASSES', [])
|
||||
class TestRecord(TestCase):
|
||||
|
||||
urls = 'django_statsd.urls'
|
||||
|
||||
def setUp(self):
|
||||
super(TestRecord, self).setUp()
|
||||
self.url = reverse('django_statsd.record')
|
||||
settings.STATSD_RECORD_GUARD = None
|
||||
self.good = {'client': 'boomerang', 'nt_nav_st': 1,
|
||||
'nt_domcomp': 3}
|
||||
self.stick = {'client': 'stick',
|
||||
'window.performance.timing.domComplete': 123,
|
||||
'window.performance.timing.domInteractive': 456,
|
||||
'window.performance.timing.domLoading': 789,
|
||||
'window.performance.timing.navigationStart': 0,
|
||||
'window.performance.navigation.redirectCount': 3,
|
||||
'window.performance.navigation.type': 1}
|
||||
|
||||
def test_no_client(self):
|
||||
assert self.client.get(self.url).status_code == 400
|
||||
|
||||
def test_no_valid_client(self):
|
||||
assert self.client.get(self.url, {'client': 'no'}).status_code == 400
|
||||
|
||||
def test_boomerang_almost(self):
|
||||
assert self.client.get(self.url,
|
||||
{'client': 'boomerang'}).status_code == 400
|
||||
|
||||
def test_boomerang_minimum(self):
|
||||
assert self.client.get(self.url,
|
||||
{'client': 'boomerang',
|
||||
'nt_nav_st': 1}).content == 'recorded'
|
||||
|
||||
@mock.patch('django_statsd.views.process_key')
|
||||
def test_boomerang_something(self, process_key):
|
||||
assert self.client.get(self.url, self.good).content == 'recorded'
|
||||
assert process_key.called
|
||||
|
||||
def test_boomerang_post(self):
|
||||
assert self.client.post(self.url, self.good).status_code == 405
|
||||
|
||||
def test_good_guard(self):
|
||||
settings.STATSD_RECORD_GUARD = lambda r: None
|
||||
assert self.client.get(self.url, self.good).status_code == 200
|
||||
|
||||
def test_bad_guard(self):
|
||||
settings.STATSD_RECORD_GUARD = lambda r: HttpResponseForbidden()
|
||||
assert self.client.get(self.url, self.good).status_code == 403
|
||||
|
||||
def test_stick_get(self):
|
||||
assert self.client.get(self.url, self.stick).status_code == 405
|
||||
|
||||
@mock.patch('django_statsd.views.process_key')
|
||||
def test_stick(self, process_key):
|
||||
assert self.client.post(self.url, self.stick).status_code == 200
|
||||
assert process_key.called
|
||||
|
||||
def test_stick_start(self):
|
||||
data = self.stick.copy()
|
||||
del data['window.performance.timing.navigationStart']
|
||||
assert self.client.post(self.url, data).status_code == 400
|
||||
|
||||
@mock.patch('django_statsd.views.process_key')
|
||||
def test_stick_missing(self, process_key):
|
||||
data = self.stick.copy()
|
||||
del data['window.performance.timing.domInteractive']
|
||||
assert self.client.post(self.url, data).status_code == 200
|
||||
assert process_key.called
|
||||
|
||||
def test_stick_garbage(self):
|
||||
data = self.stick.copy()
|
||||
data['window.performance.timing.domInteractive'] = '<alert>'
|
||||
assert self.client.post(self.url, data).status_code == 400
|
||||
|
||||
def test_stick_some_garbage(self):
|
||||
data = self.stick.copy()
|
||||
data['window.performance.navigation.redirectCount'] = '<alert>'
|
||||
assert self.client.post(self.url, data).status_code == 400
|
||||
|
||||
def test_stick_more_garbage(self):
|
||||
data = self.stick.copy()
|
||||
data['window.performance.navigation.type'] = '<alert>'
|
||||
assert self.client.post(self.url, data).status_code == 400
|
||||
|
||||
|
||||
@mock.patch.object(middleware.statsd, 'incr')
|
||||
class TestErrorLog(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
dictconfig.dictConfig(cfg)
|
||||
self.log = logging.getLogger('test.logging')
|
||||
|
||||
def division_error(self):
|
||||
try:
|
||||
1 / 0
|
||||
except:
|
||||
return sys.exc_info()
|
||||
|
||||
def test_emit(self, incr):
|
||||
self.log.error('blargh!', exc_info=self.division_error())
|
||||
assert incr.call_args[0][0] == 'error.zerodivisionerror'
|
||||
|
||||
def test_not_emit(self, incr):
|
||||
self.log.error('blargh!')
|
||||
assert not incr.called
|
||||
|
||||
|
||||
class TestPatchMethod(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestPatchMethod, self).setUp()
|
||||
|
||||
class DummyClass(object):
|
||||
|
||||
def sumargs(self, a, b, c=3, d=4):
|
||||
return a + b + c + d
|
||||
|
||||
def badfn(self, a, b=2):
|
||||
raise ValueError
|
||||
|
||||
self.cls = DummyClass
|
||||
|
||||
def test_late_patching(self):
|
||||
"""
|
||||
Objects created before patching should get patched as well.
|
||||
"""
|
||||
def patch_fn(original_fn, self, *args, **kwargs):
|
||||
return original_fn(self, *args, **kwargs) + 10
|
||||
|
||||
obj = self.cls()
|
||||
self.assertEqual(obj.sumargs(1, 2, 3, 4), 10)
|
||||
utils.patch_method(self.cls, 'sumargs')(patch_fn)
|
||||
self.assertEqual(obj.sumargs(1, 2, 3, 4), 20)
|
||||
|
||||
def test_doesnt_call_original_implicitly(self):
|
||||
"""
|
||||
Original fn must be called explicitly from patched to be
|
||||
executed.
|
||||
"""
|
||||
def patch_fn(original_fn, self, *args, **kwargs):
|
||||
return 10
|
||||
|
||||
with self.assertRaises(ValueError):
|
||||
obj = self.cls()
|
||||
obj.badfn(1, 2)
|
||||
|
||||
utils.patch_method(self.cls, 'badfn')(patch_fn)
|
||||
self.assertEqual(obj.badfn(1, 2), 10)
|
||||
|
||||
def test_args_kwargs_are_honored(self):
|
||||
"""
|
||||
Args and kwargs must be honored between calls from the patched to
|
||||
the original version.
|
||||
"""
|
||||
def patch_fn(original_fn, self, *args, **kwargs):
|
||||
return original_fn(self, *args, **kwargs)
|
||||
|
||||
utils.patch_method(self.cls, 'sumargs')(patch_fn)
|
||||
obj = self.cls()
|
||||
self.assertEqual(obj.sumargs(1, 2), 10)
|
||||
self.assertEqual(obj.sumargs(1, 1, d=1), 6)
|
||||
self.assertEqual(obj.sumargs(1, 1, 1, 1), 4)
|
||||
|
||||
def test_patched_fn_can_receive_arbitrary_arguments(self):
|
||||
"""
|
||||
Args and kwargs can be received arbitrarily with no contraints on
|
||||
the patched fn, even if the original_fn had a fixed set of
|
||||
allowed args and kwargs.
|
||||
"""
|
||||
def patch_fn(original_fn, self, *args, **kwargs):
|
||||
return args, kwargs
|
||||
|
||||
utils.patch_method(self.cls, 'badfn')(patch_fn)
|
||||
obj = self.cls()
|
||||
self.assertEqual(obj.badfn(1, d=2), ((1,), {'d': 2}))
|
||||
self.assertEqual(obj.badfn(1, d=2), ((1,), {'d': 2}))
|
||||
self.assertEqual(obj.badfn(1, 2, c=1, d=2), ((1, 2), {'c': 1, 'd': 2}))
|
||||
|
||||
|
||||
class TestCursorWrapperPatching(TestCase):
|
||||
example_queries = {
|
||||
'select': 'select * from something;',
|
||||
'insert': 'insert (1, 2) into something;',
|
||||
'update': 'update something set a=1;',
|
||||
}
|
||||
|
||||
def test_patched_callproc_calls_timer(self):
|
||||
for operation, query in self.example_queries.items():
|
||||
with mock.patch.object(statsd, 'timer') as timer:
|
||||
client = mock.Mock(executable_name='client_executable_name')
|
||||
db = mock.Mock(executable_name='name', alias='alias', client=client)
|
||||
instance = mock.Mock(db=db)
|
||||
|
||||
patched_callproc(lambda *args, **kwargs: None, instance, query)
|
||||
|
||||
self.assertEqual(timer.call_count, 1)
|
||||
self.assertEqual(timer.call_args[0][0], 'db.client_executable_name.alias.callproc.%s' % operation)
|
||||
|
||||
def test_patched_execute_calls_timer(self):
|
||||
for operation, query in self.example_queries.items():
|
||||
with mock.patch.object(statsd, 'timer') as timer:
|
||||
client = mock.Mock(executable_name='client_executable_name')
|
||||
db = mock.Mock(executable_name='name', alias='alias', client=client)
|
||||
instance = mock.Mock(db=db)
|
||||
|
||||
patched_execute(lambda *args, **kwargs: None, instance, query)
|
||||
|
||||
self.assertEqual(timer.call_count, 1)
|
||||
self.assertEqual(timer.call_args[0][0], 'db.client_executable_name.alias.execute.%s' % operation)
|
||||
|
||||
def test_patched_executemany_calls_timer(self):
|
||||
for operation, query in self.example_queries.items():
|
||||
with mock.patch.object(statsd, 'timer') as timer:
|
||||
client = mock.Mock(executable_name='client_executable_name')
|
||||
db = mock.Mock(executable_name='name', alias='alias', client=client)
|
||||
instance = mock.Mock(db=db)
|
||||
|
||||
patched_executemany(lambda *args, **kwargs: None, instance, query)
|
||||
|
||||
self.assertEqual(timer.call_count, 1)
|
||||
self.assertEqual(timer.call_args[0][0], 'db.client_executable_name.alias.executemany.%s' % operation)
|
||||
|
||||
@mock.patch(
|
||||
'django_statsd.patches.db.pre_django_1_6_cursorwrapper_getattr')
|
||||
@mock.patch('django_statsd.patches.db.patched_executemany')
|
||||
@mock.patch('django_statsd.patches.db.patched_execute')
|
||||
@mock.patch('django.db.backends.util.CursorDebugWrapper')
|
||||
@skipUnless(VERSION < (1, 6, 0), "CursorWrapper Patching for Django<1.6")
|
||||
def test_cursorwrapper_patching(self,
|
||||
CursorDebugWrapper,
|
||||
execute,
|
||||
executemany,
|
||||
_getattr):
|
||||
try:
|
||||
from django.db.backends import util
|
||||
|
||||
# We need to patch CursorWrapper like this because setting
|
||||
# __getattr__ on Mock instances raises AttributeError.
|
||||
class CursorWrapper(object):
|
||||
pass
|
||||
|
||||
_CursorWrapper = util.CursorWrapper
|
||||
util.CursorWrapper = CursorWrapper
|
||||
|
||||
from django_statsd.patches.db import patch
|
||||
execute.__name__ = 'execute'
|
||||
executemany.__name__ = 'executemany'
|
||||
_getattr.__name__ = '_getattr'
|
||||
execute.return_value = 'execute'
|
||||
executemany.return_value = 'executemany'
|
||||
_getattr.return_value = 'getattr'
|
||||
patch()
|
||||
|
||||
self.assertEqual(CursorDebugWrapper.execute(), 'execute')
|
||||
self.assertEqual(CursorDebugWrapper.executemany(), 'executemany')
|
||||
self.assertEqual(CursorWrapper.__getattr__(), 'getattr')
|
||||
finally:
|
||||
util.CursorWrapper = _CursorWrapper
|
||||
|
||||
@mock.patch('django_statsd.patches.db.patched_callproc')
|
||||
@mock.patch('django_statsd.patches.db.patched_executemany')
|
||||
@mock.patch('django_statsd.patches.db.patched_execute')
|
||||
@mock.patch('django.db.backends.util.CursorWrapper')
|
||||
@skipUnless(VERSION >= (1, 6, 0), "CursorWrapper Patching for Django>=1.6")
|
||||
def test_cursorwrapper_patching16(self,
|
||||
CursorWrapper,
|
||||
execute,
|
||||
executemany,
|
||||
callproc):
|
||||
from django_statsd.patches.db import patch
|
||||
execute.__name__ = 'execute'
|
||||
executemany.__name__ = 'executemany'
|
||||
callproc.__name__ = 'callproc'
|
||||
execute.return_value = 'execute'
|
||||
executemany.return_value = 'executemany'
|
||||
callproc.return_value = 'callproc'
|
||||
patch()
|
||||
|
||||
self.assertEqual(CursorWrapper.execute(), 'execute')
|
||||
self.assertEqual(CursorWrapper.executemany(), 'executemany')
|
||||
self.assertEqual(CursorWrapper.callproc(), 'callproc')
|
|
@ -1,7 +0,0 @@
|
|||
from django.conf.urls import url
|
||||
|
||||
import django_statsd.views
|
||||
|
||||
urlpatterns = [
|
||||
url('^record$', django_statsd.views.record, name='django_statsd.record'),
|
||||
]
|
|
@ -1,173 +0,0 @@
|
|||
import collections
|
||||
from django import http
|
||||
from django.conf import settings
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
from django.views.decorators.http import require_http_methods
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
|
||||
boomerang = {
|
||||
'window.performance.navigation.redirectCount': 'nt_red_cnt',
|
||||
'window.performance.navigation.type': 'nt_nav_type',
|
||||
'window.performance.timing.connectEnd': 'nt_con_end',
|
||||
'window.performance.timing.connectStart': 'nt_con_st',
|
||||
'window.performance.timing.domComplete': 'nt_domcomp',
|
||||
'window.performance.timing.domContentLoaded': 'nt_domcontloaded',
|
||||
'window.performance.timing.domInteractive': 'nt_domint',
|
||||
'window.performance.timing.domLoading': 'nt_domloading',
|
||||
'window.performance.timing.domainLookupEnd': 'nt_dns_end',
|
||||
'window.performance.timing.domainLookupStart': 'nt_dns_st',
|
||||
'window.performance.timing.fetchStart': 'nt_fet_st',
|
||||
'window.performance.timing.loadEventEnd': 'nt_load_end',
|
||||
'window.performance.timing.loadEventStart': 'nt_load_st',
|
||||
'window.performance.timing.navigationStart': 'nt_nav_st',
|
||||
'window.performance.timing.redirectEnd': 'nt_red_end',
|
||||
'window.performance.timing.redirectStart': 'nt_red_st',
|
||||
'window.performance.timing.requestStart': 'nt_req_st',
|
||||
'window.performance.timing.responseEnd': 'nt_res_end',
|
||||
'window.performance.timing.responseStart': 'nt_res_st',
|
||||
'window.performance.timing.unloadEventEnd': 'nt_unload_end',
|
||||
'window.performance.timing.unloadEventStart': 'nt_unload_st'
|
||||
}
|
||||
|
||||
types = {
|
||||
'0': 'navigate',
|
||||
'1': 'reload',
|
||||
'2': 'back_forward',
|
||||
'255': 'reserved'
|
||||
}
|
||||
|
||||
# These are the default keys that we will try and record.
|
||||
stick_keys = [
|
||||
'window.performance.timing.domComplete',
|
||||
'window.performance.timing.domInteractive',
|
||||
'window.performance.timing.domLoading',
|
||||
'window.performance.timing.loadEventEnd',
|
||||
'window.performance.timing.responseStart',
|
||||
'window.performance.navigation.redirectCount',
|
||||
'window.performance.navigation.type',
|
||||
]
|
||||
|
||||
|
||||
def process_key(start, key, value):
|
||||
if 'timing' in key:
|
||||
# Some values will be zero. We want the output of that to
|
||||
# be zero relative to start.
|
||||
value = max(start, int(value)) - start
|
||||
statsd.timing(key, value)
|
||||
elif key == 'window.performance.navigation.type':
|
||||
statsd.incr('%s.%s' % (key, types[value]))
|
||||
elif key == 'window.performance.navigation.redirectCount':
|
||||
statsd.incr(key, int(value))
|
||||
|
||||
|
||||
def _process_summaries(start, keys):
|
||||
calculated = {
|
||||
'network': keys['window.performance.timing.responseStart'] - start,
|
||||
'app': (keys['window.performance.timing.domLoading'] -
|
||||
keys['window.performance.timing.responseStart']),
|
||||
'dom': (keys['window.performance.timing.domComplete'] -
|
||||
keys['window.performance.timing.domLoading']),
|
||||
'rendering': (keys['window.performance.timing.loadEventEnd'] -
|
||||
keys['window.performance.timing.domComplete']),
|
||||
}
|
||||
for k, v in list(calculated.items()):
|
||||
# If loadEventEnd still does not get populated, we could end up with
|
||||
# negative numbers here.
|
||||
statsd.timing('window.performance.calculated.%s' % k, max(v, 0))
|
||||
|
||||
|
||||
@require_http_methods(['GET', 'HEAD'])
|
||||
def _process_boomerang(request):
|
||||
if 'nt_nav_st' not in request.GET:
|
||||
raise ValueError(
|
||||
'nt_nav_st not in request.GET, make sure boomerang'
|
||||
' is made with navigation API timings as per the following'
|
||||
' http://yahoo.github.com/boomerang/doc/howtos/howto-9.html')
|
||||
|
||||
# This when the request started, everything else will be relative to this
|
||||
# for the purposes of statsd measurement.
|
||||
start = int(request.GET['nt_nav_st'])
|
||||
|
||||
keys = {}
|
||||
for k in getattr(settings, 'STATSD_RECORD_KEYS', stick_keys):
|
||||
v = request.GET.get(boomerang[k])
|
||||
if not v or v == 'undefined':
|
||||
continue
|
||||
if k in boomerang:
|
||||
process_key(start, k, v)
|
||||
keys[k] = int(v)
|
||||
|
||||
try:
|
||||
_process_summaries(start, keys)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
|
||||
@require_http_methods(['POST'])
|
||||
def _process_stick(request):
|
||||
start = request.POST.get('window.performance.timing.navigationStart', None)
|
||||
if not start:
|
||||
return http.HttpResponseBadRequest()
|
||||
|
||||
start = int(start)
|
||||
keys = {}
|
||||
for k in getattr(settings, 'STATSD_RECORD_KEYS', stick_keys):
|
||||
v = request.POST.get(k, None)
|
||||
if v:
|
||||
keys[k] = int(request.POST[k])
|
||||
process_key(start, k, request.POST[k])
|
||||
|
||||
# Only process the network when we have these.
|
||||
for key in ['window.performance.timing.loadEventEnd',
|
||||
'window.performance.timing.responseStart']:
|
||||
if key not in keys:
|
||||
return
|
||||
|
||||
_process_summaries(start, keys)
|
||||
|
||||
|
||||
clients = {
|
||||
'boomerang': _process_boomerang,
|
||||
'stick': _process_stick,
|
||||
}
|
||||
|
||||
|
||||
@csrf_exempt
|
||||
@require_http_methods(["GET", "POST"])
|
||||
def record(request):
|
||||
"""
|
||||
This is a Django method you can link to in your URLs that process
|
||||
the incoming data. Be sure to add a client parameter into your request
|
||||
so that we can figure out how to process this request. For example
|
||||
if you are using boomerang, you'll need: client = boomerang.
|
||||
|
||||
You can define a method in STATSD_RECORD_GUARD that will do any lookup
|
||||
you need for imposing security on this method, so that not just anyone
|
||||
can post to it.
|
||||
"""
|
||||
data = request.POST or request.GET
|
||||
if 'client' not in data:
|
||||
return http.HttpResponseBadRequest()
|
||||
|
||||
client = data.get('client')
|
||||
if client not in clients:
|
||||
return http.HttpResponseBadRequest()
|
||||
|
||||
guard = getattr(settings, 'STATSD_RECORD_GUARD', None)
|
||||
if guard:
|
||||
if not isinstance(guard, collections.Callable):
|
||||
raise ValueError('STATSD_RECORD_GUARD must be callable')
|
||||
result = guard(request)
|
||||
if result:
|
||||
return result
|
||||
|
||||
try:
|
||||
response = clients[client](request)
|
||||
except (ValueError, KeyError):
|
||||
return http.HttpResponseBadRequest()
|
||||
|
||||
if response:
|
||||
return response
|
||||
return http.HttpResponse('recorded')
|
241
docs/conf.py
241
docs/conf.py
|
@ -1,241 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# django-statsd documentation build configuration file, created by
|
||||
# sphinx-quickstart on Fri Apr 27 17:30:33 2012.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys, os
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration -----------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be extensions
|
||||
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
|
||||
extensions = []
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = 'django-statsd'
|
||||
copyright = '2012, Andy McKay'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.3'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
|
||||
# -- Options for HTML output ---------------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'default'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'django-statsddoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output --------------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title, author, documentclass [howto/manual]).
|
||||
latex_documents = [
|
||||
('index', 'django-statsd.tex', 'django-statsd Documentation',
|
||||
'Andy McKay', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output --------------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'django-statsd', 'django-statsd Documentation',
|
||||
['Andy McKay'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output ------------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'django-statsd', 'django-statsd Documentation', 'Andy McKay',
|
||||
'django-statsd', 'One line description of project.', 'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
419
docs/index.rst
419
docs/index.rst
|
@ -1,419 +0,0 @@
|
|||
django-statsd
|
||||
=========================================
|
||||
|
||||
Django Statsd
|
||||
=============
|
||||
|
||||
Integration between statsd and django. It allows you to use different clients,
|
||||
sends timings as middleware and integrates with django debug toolbar.
|
||||
|
||||
Credits:
|
||||
|
||||
- jbalogh and jsocol for statsd and commonware, which I just ripped parts out
|
||||
of and put in here.
|
||||
- robhudson for django-debug-toolbar
|
||||
|
||||
Changes
|
||||
-------
|
||||
|
||||
0.3.15:
|
||||
|
||||
- push from travis to pypi to keep files clean
|
||||
- allow less statsd in the middleware
|
||||
- fix to a specific statsd version
|
||||
|
||||
0.3.14:
|
||||
|
||||
- pypy testing support
|
||||
- log model changes
|
||||
- log celery events
|
||||
- log db queries
|
||||
- show lower/mean/upper values in debugbar, thanks jonathanslenders!
|
||||
|
||||
0.3.12:
|
||||
|
||||
- Event better Django 1.6 support for the patches, with tests.
|
||||
|
||||
0.3.11:
|
||||
|
||||
- Django 1.6 support
|
||||
|
||||
0.3.9:
|
||||
|
||||
- statsd 2.0 support
|
||||
|
||||
- improved Django debug toolbar support
|
||||
|
||||
0.3.8.5:
|
||||
|
||||
- don't count some 404 as 500 and fix deprecation warnings
|
||||
|
||||
0.3.8.4:
|
||||
|
||||
- gauge support
|
||||
|
||||
0.3.8.3:
|
||||
|
||||
- some bug fixes
|
||||
|
||||
0.3.8.1:
|
||||
|
||||
- add in a tasty pie middleware
|
||||
|
||||
0.3.8:
|
||||
|
||||
- add in a nose plugin
|
||||
|
||||
0.3.7:
|
||||
|
||||
- add in metlog client
|
||||
|
||||
0.3.6:
|
||||
|
||||
- add in log handler
|
||||
|
||||
0.3.5:
|
||||
|
||||
- fix tests to work standalone
|
||||
- add in waterfall view of timings
|
||||
|
||||
0.3.3:
|
||||
|
||||
- fix setup.py to include loggers etc
|
||||
|
||||
0.3.2:
|
||||
|
||||
- update to work with latest Django Debug Toolbar
|
||||
|
||||
0.3:
|
||||
|
||||
- added in logging handler for logging error counts to stats
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
From pypi::
|
||||
|
||||
pip install django-statsd-mozilla
|
||||
|
||||
Because there is already a django-statsd on pypi.
|
||||
|
||||
Requirement, https://github.com/jsocol/pystatsd or::
|
||||
|
||||
pip install statsd
|
||||
|
||||
Because there is already a pystatsd on pypi. This will be automatically added
|
||||
when you install django-statsd-mozilla.
|
||||
|
||||
First off, pick your client, one of:
|
||||
|
||||
- django_statsd.clients.null
|
||||
|
||||
This one does nothing, good for development. No point in wasting those UDP
|
||||
packets.
|
||||
|
||||
- django_statsd.clients.toolbar
|
||||
|
||||
Use for the django debug toolbar, stores all the statsd pings on the request
|
||||
so they can be used in the toolbar.
|
||||
|
||||
- django_statsd.clients.normal
|
||||
|
||||
Use this for production, it just passes through to the real actual pystatsd.
|
||||
|
||||
- django_statsd.clients.log
|
||||
|
||||
Just writes the values to a log file using Python's logging module.
|
||||
|
||||
- django_statsd.clients.moz_metlog
|
||||
|
||||
Use this to route messages through
|
||||
_metlog: http://github.com/mozilla-services/metlog-py. Note that
|
||||
using metlog will require you to bind the metlog instance to bind
|
||||
the metlog client instance as settings.METLOG.
|
||||
|
||||
- django_statsd.clients.nose
|
||||
|
||||
Route messages through to the nose plugin. This also works with the toolbar
|
||||
client, so you don't need to change them on -dev.
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
To send timings from your code, use just like pystatsd, but change your imports
|
||||
to read::
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
|
||||
For example::
|
||||
|
||||
from django_statsd.clients import statsd
|
||||
statsd.incr('response.200')
|
||||
|
||||
Django statsd will choose the client as specified in your config and send the
|
||||
data to it. You can change your client by specifying it in the config, the
|
||||
default is::
|
||||
|
||||
STATSD_CLIENT = 'django_statsd.clients.normal'
|
||||
|
||||
To send timings or counts with every request, add in some middleware::
|
||||
|
||||
MIDDLEWARE_CLASSES = (
|
||||
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
|
||||
'django_statsd.middleware.GraphiteMiddleware',
|
||||
) + MIDDLEWARE_CLASSES
|
||||
|
||||
If you are using tastypie, you might enjoy::
|
||||
|
||||
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
|
||||
|
||||
To get timings for your database or your cache, put in some monkeypatches::
|
||||
|
||||
STATSD_PATCHES = [
|
||||
'django_statsd.patches.db',
|
||||
'django_statsd.patches.cache',
|
||||
]
|
||||
|
||||
You can change the host that stats are sent to with the `STATSD_HOST` setting::
|
||||
|
||||
STATSD_HOST = 'localhost'
|
||||
|
||||
Similarly, you can use the `STATSD_PORT`setting to customize the port number (which defaults to `8125`)::
|
||||
|
||||
STATSD_PORT = 8125
|
||||
|
||||
Toolbar integration
|
||||
-------------------
|
||||
|
||||
Make sure `django_statsd` is installed::
|
||||
|
||||
INSTALLED_APPS = (
|
||||
..
|
||||
'django_statsd',
|
||||
)
|
||||
|
||||
This will show you the statsd timings in the toolbar::
|
||||
|
||||
MIDDLEWARE_CLASSES = (
|
||||
'debug_toolbar.middleware.DebugToolbarMiddleware',
|
||||
) + MIDDLEWARE_CLASSES
|
||||
|
||||
Note: this must go before the GraphiteMiddleware so that we've got the timing
|
||||
data in before we show the toolbar panel.
|
||||
|
||||
Add in the panel::
|
||||
|
||||
DEBUG_TOOLBAR_PANELS = (
|
||||
...
|
||||
'django_statsd.panel.StatsdPanel',
|
||||
)
|
||||
|
||||
Set the client::
|
||||
|
||||
STATSD_CLIENT = 'django_statsd.clients.toolbar'
|
||||
|
||||
Finally if you have production data coming into a graphite server, you can
|
||||
show data from that server. If you have one, link it up::
|
||||
|
||||
Here's the configuration we use on AMO. Because triggers and counts go
|
||||
to different spots, you can configure them differently::
|
||||
|
||||
TOOLBAR_STATSD = {
|
||||
'graphite': 'https://graphite-phx.mozilla.org/render/',
|
||||
'roots': {
|
||||
'timers': ['stats.timers.addons-dev', 'stats.timers.addons'],
|
||||
'counts': ['stats.addons-dev', 'stats.addons']
|
||||
}
|
||||
}
|
||||
|
||||
The key is added on to the root. So if you've got a key of `view.GET` this
|
||||
would look that up on the graphite server with the key::
|
||||
|
||||
stats.addons.view.GET
|
||||
|
||||
Django Model save and delete integration
|
||||
----------------------------------------
|
||||
|
||||
You can log all create, update and delete events of django models.
|
||||
Add to your Django settings::
|
||||
|
||||
STATSD_MODEL_SIGNALS = True
|
||||
|
||||
Celery signals integration
|
||||
--------------------------
|
||||
|
||||
You can log all the ``task_sent``, ``task_prerun``, ``task_postrun`` and
|
||||
``task_failure`` signals of celery along with the duration of succesful tasks.
|
||||
|
||||
To enable this, add the following to your Django settings::
|
||||
|
||||
STATSD_CELERY_SIGNALS = True
|
||||
|
||||
Front end timing integration
|
||||
----------------------------
|
||||
|
||||
New browsers come with an API to provide timing information, see:
|
||||
|
||||
http://w3c-test.org/webperf/specs/NavigationTiming/
|
||||
|
||||
To record this in statsd you need a JavaScript lib on the front end to send
|
||||
data to the server. You then use the server to record the information. This
|
||||
library provides a view to hook that up for different libraries.
|
||||
|
||||
First, make sure you can record the timings in your Django site urls. This
|
||||
could be done by pointing straight to the view or including the URL for
|
||||
example::
|
||||
|
||||
from django_statsd.urls import urlpatterns as statsd_patterns
|
||||
|
||||
urlpatterns = [
|
||||
url(r'^services/timing/', include(statsd_patterns)),
|
||||
]
|
||||
|
||||
In this case the URL to the record view will be `/services/timing/record`.
|
||||
|
||||
Second, hook up the client. There is a un-sophisticated client called `stick`
|
||||
included in the static directory. This requires no configuration on your part,
|
||||
just make sure that the file `django_statsd/static/stick.js` is in your sites
|
||||
JS.
|
||||
|
||||
Then call it in the following manner::
|
||||
|
||||
stick.send('/services/timing/record');
|
||||
|
||||
We also include support for `boomerang`, a sophisticated client from Yahoo:
|
||||
|
||||
http://yahoo.github.com/boomerang
|
||||
|
||||
To hook this up, first add in boomerang to your site, make sure you use the web
|
||||
timing enabled version, as discussed here:
|
||||
|
||||
http://yahoo.github.com/boomerang/doc/howtos/howto-9.html
|
||||
|
||||
When the script is added to your site, add the following JS::
|
||||
|
||||
BOOMR.init({
|
||||
beacon_url: '/services/timing/record'
|
||||
}).addVar('client', 'boomerang');
|
||||
|
||||
Once you've installed either boomerang or stick, you'll see the following keys
|
||||
sent::
|
||||
|
||||
window.performance.timing.domComplete 5309|ms
|
||||
window.performance.timing.domInteractive 3819|ms
|
||||
window.performance.timing.domLoading 1780|ms
|
||||
window.performance.navigation.redirectCount 0|c
|
||||
window.performance.navigation.type.reload 1|c
|
||||
|
||||
There's a couple of options with this you can set in settings::
|
||||
|
||||
STATSD_RECORD_KEYS (optional)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
A list of the keys you want to record, there's quite a few in the timing api
|
||||
and you likely don't want to record them all. Here's the default::
|
||||
|
||||
STATSD_RECORD_KEYS = [
|
||||
'window.performance.timing.domComplete',
|
||||
'window.performance.timing.domInteractive',
|
||||
'window.performance.timing.domLoading',
|
||||
'window.performance.navigation.redirectCount',
|
||||
'window.performance.navigation.type',
|
||||
]
|
||||
|
||||
Override this to get different ones.
|
||||
|
||||
STATSD_RECORD_GUARD (optional)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
There's only limited ways to stop people posting junk to your URLs. By defining
|
||||
a this a function you can do some work to allow requests to your needs. If the
|
||||
function returns None, the request is allowed through. If you don't want to
|
||||
allow the request, return any valid Django HTTP response. For example to deny
|
||||
everyone not in INTERNAL_IPS::
|
||||
|
||||
from django.http import HttpResponseForbidden
|
||||
|
||||
def internal_only(request):
|
||||
if request.META['REMOTE_ADDR'] not in INTERNAL_IPS:
|
||||
return HttpResponseForbidden()
|
||||
|
||||
STATSD_RECORD_GUARD = internal_only
|
||||
|
||||
STATSD_VIEW_TIMER_DETAILS (optional)
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The middleware sends timing pings for the almost the same thing three times
|
||||
when accessing a view: `module.name.method`, `module.method` and `method` by
|
||||
default. Setting this to `False` just does the former.
|
||||
|
||||
Logging errors
|
||||
~~~~~~~~~~~~~~
|
||||
|
||||
If you want to log a count of the errors in your application to statsd, you can
|
||||
do this by adding in the handler. For example in your logging configuration::
|
||||
|
||||
'handlers': {
|
||||
'test_statsd_handler': {
|
||||
'class': 'django_statsd.loggers.errors.StatsdHandler',
|
||||
},
|
||||
}
|
||||
|
||||
Testing
|
||||
=======
|
||||
|
||||
You need to install tox_ to run the tests.
|
||||
You can run the full test matrix with:
|
||||
|
||||
tox
|
||||
|
||||
or choose a specific environment - let's say Python 3.4 and Django 1.11 - with:
|
||||
|
||||
tox -e py34-django111
|
||||
|
||||
You can list all the available environments with:
|
||||
|
||||
tox -l
|
||||
|
||||
.. _tox: http://tox.readthedocs.io/en/latest/index.html
|
||||
|
||||
Nose
|
||||
====
|
||||
|
||||
There is also a nose plugin. If you use nose, then run tests, you'll get output
|
||||
in your tests. To use run tests with the following::
|
||||
|
||||
--with-statsd
|
||||
|
||||
Contributors
|
||||
~~~~~~~~~~~~
|
||||
|
||||
* streeter
|
||||
* crankycoder
|
||||
* glogiotatidis
|
||||
* tominsam
|
||||
* youngbob
|
||||
* jsatt
|
||||
* youngbob
|
||||
* jsocol
|
||||
* janfabry
|
||||
* tomchristie
|
||||
* diox
|
||||
* frewsxcv
|
||||
* fud
|
||||
* ftobia
|
||||
* jawnb
|
||||
* fgallina
|
||||
* jonathanslenders
|
||||
* streeter
|
||||
|
||||
See:
|
||||
|
||||
https://github.com/andymckay/django-statsd/pulls?direction=desc&page=1&sort=created&state=closed
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
BIN
example.png
BIN
example.png
Binary file not shown.
Before Width: | Height: | Size: 148 KiB |
|
@ -1 +0,0 @@
|
|||
metlog-py
|
|
@ -1,4 +0,0 @@
|
|||
mock
|
||||
nose
|
||||
pytest-django
|
||||
statsd==3.2.1
|
36
setup.py
36
setup.py
|
@ -1,36 +0,0 @@
|
|||
from setuptools import setup
|
||||
|
||||
|
||||
setup(
|
||||
# Because django-statsd was taken, I called this django-statsd-mozilla.
|
||||
name='django-statsd-mozilla',
|
||||
version='0.3.16',
|
||||
description='Django interface with statsd',
|
||||
long_description=open('README.rst').read(),
|
||||
author='Andy McKay',
|
||||
author_email='andym@mozilla.com',
|
||||
license='BSD',
|
||||
install_requires=['statsd >= 2.1.2, != 3.2 , <= 4.0'],
|
||||
packages=['django_statsd',
|
||||
'django_statsd/patches',
|
||||
'django_statsd/clients',
|
||||
'django_statsd/loggers',
|
||||
'django_statsd/management',
|
||||
'django_statsd/management/commands'],
|
||||
url='https://github.com/andymckay/django-statsd',
|
||||
entry_points={
|
||||
'nose.plugins.0.10': [
|
||||
'django_statsd = django_statsd:NoseStatsd'
|
||||
]
|
||||
},
|
||||
include_package_data=True,
|
||||
zip_safe=False,
|
||||
classifiers=[
|
||||
'Intended Audience :: Developers',
|
||||
'Natural Language :: English',
|
||||
'Operating System :: OS Independent',
|
||||
'Framework :: Django',
|
||||
'Programming Language :: Python',
|
||||
'Programming Language :: Python :: 3'
|
||||
]
|
||||
)
|
|
@ -1,34 +0,0 @@
|
|||
|
||||
def pytest_configure():
|
||||
from django.conf import settings
|
||||
|
||||
settings.configure(
|
||||
DEBUG_PROPAGATE_EXCEPTIONS=True,
|
||||
DATABASES={
|
||||
'default': {
|
||||
'ENGINE': 'django.db.backends.sqlite3',
|
||||
'NAME': ':memory:'
|
||||
}
|
||||
},
|
||||
SITE_ID=1,
|
||||
SECRET_KEY='not very secret in tests',
|
||||
ROOT_URLCONF='django_statsd.urls',
|
||||
INSTALLED_APPS=(
|
||||
'django.contrib.auth',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.sessions',
|
||||
'django.contrib.sites',
|
||||
'django.contrib.staticfiles',
|
||||
'django_statsd',
|
||||
'tests',
|
||||
),
|
||||
STATSD_CLIENT='django_statsd.clients.null',
|
||||
STATSD_PREFIX=None,
|
||||
METLOG=None,
|
||||
)
|
||||
|
||||
try:
|
||||
import django
|
||||
django.setup()
|
||||
except AttributeError:
|
||||
pass
|
|
@ -1,546 +0,0 @@
|
|||
import json
|
||||
import logging.config
|
||||
import sys
|
||||
import unittest
|
||||
|
||||
from django.conf import settings
|
||||
from nose.exc import SkipTest
|
||||
from nose import tools as nose_tools
|
||||
|
||||
try:
|
||||
from django.urls import reverse
|
||||
except ImportError:
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.http import HttpResponse, HttpResponseForbidden
|
||||
from django.test import TestCase
|
||||
from django.test.client import RequestFactory
|
||||
from django.utils.http import urlencode
|
||||
|
||||
import mock
|
||||
from nose.tools import eq_
|
||||
from django_statsd.clients import get_client, statsd
|
||||
from django_statsd.patches import utils
|
||||
from django_statsd.patches.db import (
|
||||
patched_callproc,
|
||||
patched_execute,
|
||||
patched_executemany,
|
||||
)
|
||||
from django_statsd import middleware
|
||||
|
||||
cfg = {
|
||||
'version': 1,
|
||||
'formatters': {},
|
||||
'handlers': {
|
||||
'test_statsd_handler': {
|
||||
'class': 'django_statsd.loggers.errors.StatsdHandler',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'test.logging': {
|
||||
'handlers': ['test_statsd_handler'],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@mock.patch.object(middleware.statsd, 'incr')
|
||||
class TestIncr(TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.req = RequestFactory().get('/')
|
||||
self.res = HttpResponse()
|
||||
|
||||
def test_graphite_response(self, incr):
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_response(self.req, self.res)
|
||||
assert incr.called
|
||||
|
||||
def test_graphite_response_authenticated(self, incr):
|
||||
self.req.user = mock.Mock()
|
||||
self.req.user.is_authenticated.return_value = True
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(incr.call_count, 2)
|
||||
|
||||
def test_graphite_exception(self, incr):
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_exception(self.req, None)
|
||||
assert incr.called
|
||||
|
||||
def test_graphite_exception_authenticated(self, incr):
|
||||
self.req.user = mock.Mock()
|
||||
self.req.user.is_authenticated.return_value = True
|
||||
gmw = middleware.GraphiteMiddleware()
|
||||
gmw.process_exception(self.req, None)
|
||||
eq_(incr.call_count, 2)
|
||||
|
||||
|
||||
@mock.patch.object(middleware.statsd, 'timing')
|
||||
class TestTiming(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.req = RequestFactory().get('/')
|
||||
self.res = HttpResponse()
|
||||
|
||||
def test_request_timing(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.GraphiteRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), dict())
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.%s.%s.GET' % (func.__module__, func.__name__),
|
||||
'view.%s.GET' % func.__module__,
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
def test_request_timing_exception(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.GraphiteRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), dict())
|
||||
gmw.process_exception(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.%s.%s.GET' % (func.__module__, func.__name__),
|
||||
'view.%s.GET' % func.__module__,
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
def test_request_timing_tastypie(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.TastyPieRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), {
|
||||
'api_name': 'my_api_name',
|
||||
'resource_name': 'my_resource_name'
|
||||
})
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.my_api_name.my_resource_name.GET',
|
||||
'view.my_api_name.GET',
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
def test_request_timing_tastypie_fallback(self, timing):
|
||||
func = lambda x: x
|
||||
gmw = middleware.TastyPieRequestTimingMiddleware()
|
||||
gmw.process_view(self.req, func, tuple(), dict())
|
||||
gmw.process_response(self.req, self.res)
|
||||
eq_(timing.call_count, 3)
|
||||
names = ['view.%s.%s.GET' % (func.__module__, func.__name__),
|
||||
'view.%s.GET' % func.__module__,
|
||||
'view.GET']
|
||||
for expected, (args, kwargs) in zip(names, timing.call_args_list):
|
||||
eq_(expected, args[0])
|
||||
|
||||
|
||||
class TestClient(unittest.TestCase):
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT', 'statsd.client')
|
||||
def test_normal(self):
|
||||
eq_(get_client().__module__, 'statsd.client')
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT',
|
||||
'django_statsd.clients.null')
|
||||
def test_null(self):
|
||||
eq_(get_client().__module__, 'django_statsd.clients.null')
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT',
|
||||
'django_statsd.clients.toolbar')
|
||||
def test_toolbar(self):
|
||||
eq_(get_client().__module__, 'django_statsd.clients.toolbar')
|
||||
|
||||
@mock.patch.object(settings, 'STATSD_CLIENT',
|
||||
'django_statsd.clients.toolbar')
|
||||
def test_toolbar_send(self):
|
||||
client = get_client()
|
||||
eq_(client.cache, {})
|
||||
client.incr('testing')
|
||||
eq_(client.cache, {'testing|count': [[1, 1]]})
|
||||
|
||||
|
||||
class TestMetlogClient(TestCase):
|
||||
|
||||
def check_metlog(self):
|
||||
try:
|
||||
from metlog.config import client_from_dict_config
|
||||
return client_from_dict_config
|
||||
except ImportError:
|
||||
raise SkipTest("Metlog is not installed")
|
||||
|
||||
@nose_tools.raises(AttributeError)
|
||||
def test_no_metlog(self):
|
||||
with self.settings(STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
get_client()
|
||||
|
||||
def _create_client(self):
|
||||
client_from_dict_config = self.check_metlog()
|
||||
|
||||
# Need to load within the test in case metlog is not installed
|
||||
from metlog.config import client_from_dict_config
|
||||
|
||||
METLOG_CONF = {
|
||||
'logger': 'django-statsd',
|
||||
'sender': {
|
||||
'class': 'metlog.senders.DebugCaptureSender',
|
||||
},
|
||||
}
|
||||
|
||||
return client_from_dict_config(METLOG_CONF)
|
||||
|
||||
def test_get_client(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(client.__module__, 'django_statsd.clients.moz_metlog')
|
||||
|
||||
def test_metlog_incr(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
client.incr('testing')
|
||||
eq_(len(client.metlog.sender.msgs), 1)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '1')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'moz_metlog.testing')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
def test_metlog_decr(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
client.decr('testing')
|
||||
eq_(len(client.metlog.sender.msgs), 1)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '-1')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'moz_metlog.testing')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
def test_metlog_timing(self):
|
||||
metlog = self._create_client()
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='moz_metlog',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
client.timing('testing', 512, rate=2)
|
||||
eq_(len(client.metlog.sender.msgs), 1)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '512')
|
||||
eq_(msg['fields']['rate'], 2)
|
||||
eq_(msg['fields']['name'], 'moz_metlog.testing')
|
||||
eq_(msg['type'], 'timer')
|
||||
|
||||
@nose_tools.raises(AttributeError)
|
||||
def test_metlog_no_prefixes(self):
|
||||
metlog = self._create_client()
|
||||
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
client.incr('foo', 2)
|
||||
|
||||
def test_metlog_prefixes(self):
|
||||
metlog = self._create_client()
|
||||
|
||||
with self.settings(METLOG=metlog,
|
||||
STATSD_PREFIX='some_prefix',
|
||||
STATSD_CLIENT='django_statsd.clients.moz_metlog'):
|
||||
client = get_client()
|
||||
eq_(len(client.metlog.sender.msgs), 0)
|
||||
|
||||
client.timing('testing', 512, rate=2)
|
||||
client.incr('foo', 2)
|
||||
client.decr('bar', 5)
|
||||
|
||||
eq_(len(client.metlog.sender.msgs), 3)
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[0])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '512')
|
||||
eq_(msg['fields']['rate'], 2)
|
||||
eq_(msg['fields']['name'], 'some_prefix.testing')
|
||||
eq_(msg['type'], 'timer')
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[1])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '2')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'some_prefix.foo')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
msg = json.loads(client.metlog.sender.msgs[2])
|
||||
eq_(msg['severity'], 6)
|
||||
eq_(msg['payload'], '-5')
|
||||
eq_(msg['fields']['rate'], 1)
|
||||
eq_(msg['fields']['name'], 'some_prefix.bar')
|
||||
eq_(msg['type'], 'counter')
|
||||
|
||||
|
||||
# This is primarily for Zamboni, which loads in the custom middleware
# classes, one of which, breaks posts to our url. Let's stop that.
@mock.patch.object(settings, 'MIDDLEWARE_CLASSES', [])
class TestRecord(TestCase):
    """Exercise the django_statsd.record view for both supported clients.

    The view accepts timing beacons from two JS clients: 'boomerang'
    (GET only) and 'stick' (POST only), and rejects anything else.
    """

    urls = 'django_statsd.urls'

    def setUp(self):
        super(TestRecord, self).setUp()
        self.url = reverse('django_statsd.record')
        settings.STATSD_RECORD_GUARD = None
        # Minimal valid boomerang payload.
        self.good = {
            'client': 'boomerang',
            'nt_nav_st': 1,
            'nt_domcomp': 3,
        }
        # Full stick payload: window.performance timing + navigation data.
        self.stick = {
            'client': 'stick',
            'window.performance.timing.domComplete': 123,
            'window.performance.timing.domInteractive': 456,
            'window.performance.timing.domLoading': 789,
            'window.performance.timing.navigationStart': 0,
            'window.performance.navigation.redirectCount': 3,
            'window.performance.navigation.type': 1,
        }

    def test_no_client(self):
        # A request without a client parameter is rejected.
        resp = self.client.get(self.url)
        assert resp.status_code == 400

    def test_no_valid_client(self):
        # Unknown client names are rejected.
        resp = self.client.get(self.url, {'client': 'no'})
        assert resp.status_code == 400

    def test_boomerang_almost(self):
        # boomerang without nt_nav_st is still invalid.
        resp = self.client.get(self.url, {'client': 'boomerang'})
        assert resp.status_code == 400

    def test_boomerang_minimum(self):
        # nt_nav_st alone is enough for boomerang to be recorded.
        resp = self.client.get(self.url, {'client': 'boomerang', 'nt_nav_st': 1})
        assert resp.content.decode() == 'recorded'

    @mock.patch('django_statsd.views.process_key')
    def test_boomerang_something(self, process_key):
        # A valid payload is recorded and each key is processed.
        resp = self.client.get(self.url, self.good)
        assert resp.content.decode() == 'recorded'
        assert process_key.called

    def test_boomerang_post(self):
        # boomerang data must arrive via GET, not POST.
        resp = self.client.post(self.url + '?' + urlencode(self.good), self.good)
        assert resp.status_code == 405

    def test_good_guard(self):
        # A guard returning None lets the request through.
        settings.STATSD_RECORD_GUARD = lambda r: None
        resp = self.client.get(self.url, self.good)
        assert resp.status_code == 200

    def test_bad_guard(self):
        # A guard returning a response short-circuits the view.
        settings.STATSD_RECORD_GUARD = lambda r: HttpResponseForbidden()
        resp = self.client.get(self.url, self.good)
        assert resp.status_code == 403

    def test_stick_get(self):
        # stick data must arrive via POST, not GET.
        resp = self.client.get(self.url, self.stick)
        assert resp.status_code == 405

    @mock.patch('django_statsd.views.process_key')
    def test_stick(self, process_key):
        # A full stick payload is accepted and processed.
        resp = self.client.post(self.url, self.stick)
        assert resp.status_code == 200
        assert process_key.called

    def test_stick_start(self):
        # navigationStart is mandatory for the stick client.
        payload = self.stick.copy()
        del payload['window.performance.timing.navigationStart']
        resp = self.client.post(self.url, payload)
        assert resp.status_code == 400

    @mock.patch('django_statsd.views.process_key')
    def test_stick_missing(self, process_key):
        # Other timing keys are optional.
        payload = self.stick.copy()
        del payload['window.performance.timing.domInteractive']
        resp = self.client.post(self.url, payload)
        assert resp.status_code == 200
        assert process_key.called

    def test_stick_garbage(self):
        # Non-numeric timing values are rejected.
        payload = self.stick.copy()
        payload['window.performance.timing.domInteractive'] = '<alert>'
        resp = self.client.post(self.url, payload)
        assert resp.status_code == 400

    def test_stick_some_garbage(self):
        # Non-numeric redirectCount is rejected.
        payload = self.stick.copy()
        payload['window.performance.navigation.redirectCount'] = '<alert>'
        resp = self.client.post(self.url, payload)
        assert resp.status_code == 400

    def test_stick_more_garbage(self):
        # Non-numeric navigation type is rejected.
        payload = self.stick.copy()
        payload['window.performance.navigation.type'] = '<alert>'
        resp = self.client.post(self.url, payload)
        assert resp.status_code == 400
||||
|
||||
@mock.patch.object(middleware.statsd, 'incr')
class TestErrorLog(TestCase):
    """The statsd logging handler increments an error counter, but only
    when a record actually carries exception info."""

    def setUp(self):
        # Install the statsd logging handler configuration for this test.
        logging.config.dictConfig(cfg)
        self.log = logging.getLogger('test.logging')

    def division_error(self):
        """Return a real sys.exc_info() triple for a ZeroDivisionError."""
        try:
            1 / 0
        # Narrowed from a bare `except:` — the body can only raise
        # ZeroDivisionError, and a bare except would silently swallow
        # unrelated errors (including KeyboardInterrupt/SystemExit).
        except ZeroDivisionError:
            return sys.exc_info()

    def test_emit(self, incr):
        # Logging with exc_info increments error.<lowercased exception name>.
        self.log.error('blargh!', exc_info=self.division_error())
        assert incr.call_args[0][0] == 'error.zerodivisionerror'

    def test_not_emit(self, incr):
        # Logging without exc_info must not touch the counter.
        self.log.error('blargh!')
        assert not incr.called
||||
|
||||
class TestPatchMethod(TestCase):
    """Behavior of utils.patch_method: wrapping an existing method with a
    function that receives the original as its first argument."""

    def setUp(self):
        super(TestPatchMethod, self).setUp()

        class DummyClass(object):

            def sumargs(self, a, b, c=3, d=4):
                return a + b + c + d

            def badfn(self, a, b=2):
                raise ValueError

        # A fresh class per test so patches never leak between tests.
        self.cls = DummyClass

    def test_late_patching(self):
        """
        Objects created before patching should get patched as well.
        """
        def wrapper(original_fn, self, *args, **kwargs):
            return original_fn(self, *args, **kwargs) + 10

        instance = self.cls()
        self.assertEqual(instance.sumargs(1, 2, 3, 4), 10)
        utils.patch_method(self.cls, 'sumargs')(wrapper)
        # The pre-existing instance now goes through the wrapper too.
        self.assertEqual(instance.sumargs(1, 2, 3, 4), 20)

    def test_doesnt_call_original_implicitly(self):
        """
        Original fn must be called explicitly from patched to be
        executed.
        """
        def wrapper(original_fn, self, *args, **kwargs):
            return 10

        with self.assertRaises(ValueError):
            instance = self.cls()
            instance.badfn(1, 2)

        utils.patch_method(self.cls, 'badfn')(wrapper)
        # The wrapper never invokes original_fn, so no ValueError now.
        self.assertEqual(instance.badfn(1, 2), 10)

    def test_args_kwargs_are_honored(self):
        """
        Args and kwargs must be honored between calls from the patched to
        the original version.
        """
        def wrapper(original_fn, self, *args, **kwargs):
            return original_fn(self, *args, **kwargs)

        utils.patch_method(self.cls, 'sumargs')(wrapper)
        instance = self.cls()
        # Defaults, keyword overrides and full positional calls all pass
        # through the wrapper unchanged.
        self.assertEqual(instance.sumargs(1, 2), 10)
        self.assertEqual(instance.sumargs(1, 1, d=1), 6)
        self.assertEqual(instance.sumargs(1, 1, 1, 1), 4)

    def test_patched_fn_can_receive_arbitrary_arguments(self):
        """
        Args and kwargs can be received arbitrarily with no contraints on
        the patched fn, even if the original_fn had a fixed set of
        allowed args and kwargs.
        """
        def wrapper(original_fn, self, *args, **kwargs):
            return args, kwargs

        utils.patch_method(self.cls, 'badfn')(wrapper)
        instance = self.cls()
        self.assertEqual(instance.badfn(1, d=2), ((1,), {'d': 2}))
        self.assertEqual(instance.badfn(1, d=2), ((1,), {'d': 2}))
        self.assertEqual(instance.badfn(1, 2, c=1, d=2), ((1, 2), {'c': 1, 'd': 2}))
||||
|
||||
class TestCursorWrapperPatching(TestCase):
    """The patched cursor methods time each query under a key that encodes
    the DB client executable, the connection alias, the cursor method and
    the SQL operation (select/insert/update)."""

    example_queries = {
        'select': 'select * from something;',
        'insert': 'insert (1, 2) into something;',
        'update': 'update something set a=1;',
    }

    def _check_timer(self, patched_fn, method_name):
        # Shared body for the three near-identical tests below: for every
        # example query, the patched function must emit exactly one timer
        # named db.<client executable>.<alias>.<method>.<operation>.
        for operation, query in self.example_queries.items():
            with mock.patch.object(statsd, 'timer') as timer:
                client = mock.Mock(executable_name='client_executable_name')
                db = mock.Mock(executable_name='name', alias='alias', client=client)
                instance = mock.Mock(db=db)

                patched_fn(lambda *args, **kwargs: None, instance, query)

                self.assertEqual(timer.call_count, 1)
                self.assertEqual(
                    timer.call_args[0][0],
                    'db.client_executable_name.alias.%s.%s' % (method_name, operation))

    def test_patched_callproc_calls_timer(self):
        self._check_timer(patched_callproc, 'callproc')

    def test_patched_execute_calls_timer(self):
        self._check_timer(patched_execute, 'execute')

    def test_patched_executemany_calls_timer(self):
        self._check_timer(patched_executemany, 'executemany')

    @mock.patch('django_statsd.patches.db.patched_callproc')
    @mock.patch('django_statsd.patches.db.patched_executemany')
    @mock.patch('django_statsd.patches.db.patched_execute')
    @mock.patch('django.db.backends.utils.CursorWrapper')
    def test_cursorwrapper_patching(self, CursorWrapper, execute, executemany, callproc):
        from django_statsd.patches.db import patch
        # patch_method reads __name__ off the wrapper, so the mocks need one.
        execute.__name__ = 'execute'
        executemany.__name__ = 'executemany'
        callproc.__name__ = 'callproc'
        execute.return_value = 'execute'
        executemany.return_value = 'executemany'
        callproc.return_value = 'callproc'
        patch()

        # After patch(), CursorWrapper's methods are the patched versions.
        self.assertEqual(CursorWrapper.execute(), 'execute')
        self.assertEqual(CursorWrapper.executemany(), 'executemany')
        self.assertEqual(CursorWrapper.callproc(), 'callproc')
|
tox.ini (32 lines, removed in this diff):

[pytest]
addopts=--tb=short

[tox]
envlist =
    {py27,py33,py34,py35,pypy,pypy3}-django18,
    {py27,py34,py35,pypy,pypy3}-django{19,110},
    {py27,py34,py35,py36,pypy,pypy3}-django111,
    {py35,py36,pypy,pypy3}-djangomaster

[travis:env]
DJANGO =
    1.8: django18
    1.9: django19
    1.10: django110
    1.11: django111
    master: djangomaster

[testenv]
commands = py.test
setenv =
    PYTHONDONTWRITEBYTECODE=1
    PYTHONWARNINGS=once

deps =
    py27: -roptional.txt
    django18: Django>=1.8,<1.9
    django19: Django>=1.9,<1.10
    django110: Django>=1.10,<1.11
    django111: Django>=1.11,<2.0
    djangomaster: https://github.com/django/django/archive/master.tar.gz
    -rrequirements.txt
|
Loading…
Reference in New Issue