# passerelle/passerelle/base/models.py

import collections
import copy
import datetime
import inspect
import logging
import os
import re
import sys
import traceback
import base64
import itertools
import uuid
from django.apps import apps
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.core.exceptions import ValidationError, ObjectDoesNotExist, PermissionDenied
from django.core.urlresolvers import reverse
from django.db import connection, models, transaction
from django.db.models import Q
from django.test import override_settings
from django.utils.text import slugify
from django.utils import timezone, six
from django.utils.encoding import force_text
from django.utils.six.moves.urllib.parse import urlparse
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
from django.core.files.base import ContentFile
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import fields
from model_utils.managers import InheritanceManager as ModelUtilsInheritanceManager
import passerelle
import requests
from passerelle.compat import json_loads
from passerelle.forms import GenericConnectorForm
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
KEYTYPE_CHOICES = (
('API', _('API Key')),
('SIGN', _('HMAC Signature')),
)
BASE_EXPORT_FIELDS = (models.TextField, models.CharField, models.SlugField,
                      models.URLField, models.BooleanField, models.IntegerField,
                      models.CommaSeparatedIntegerField, models.EmailField,
                      models.PositiveIntegerField, JSONField,
                      models.FloatField)
@six.python_2_unicode_compatible
class ApiUser(models.Model):
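    '''Account used by calling applications to access connectors.

    Access can be controlled by an API key or an HMAC signature
    (keytype/key) and restricted to a source IP address (ipsource).
    '''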
username = models.CharField(max_length=128,
verbose_name=_('Username'),
unique=True)
fullname = models.CharField(max_length=50,
verbose_name=_('Full Name'))
description = models.TextField(blank=True,
verbose_name=_('Description'))
keytype = models.CharField(max_length=4, choices=KEYTYPE_CHOICES,
blank=True, verbose_name=_('Key Type'))
key = models.CharField(max_length=256, blank=True, verbose_name=_('Key'))
ipsource = models.GenericIPAddressField(blank=True, null=True, unpack_ipv4=True,
verbose_name=_('IP Address'))
def __str__(self):
return u'%s <%s>' % (self.fullname, self.username)
def clean(self):
if self.keytype and not self.key:
raise ValidationError(_('Key can not be empty for type %s.') % self.keytype)
def export_json(self):
return {
'@type': 'passerelle-user',
'username': self.username,
'fullname': self.fullname,
'description': self.description,
'keytype': self.keytype,
'key': self.key,
'ipsource': self.ipsource,
}
    @classmethod
    def import_json(cls, d, overwrite=False):
        if d.get('@type') != 'passerelle-user':
            raise ValueError('not a passerelle user export')
        d = d.copy()
        d.pop('@type')
        api_user, created = cls.objects.get_or_create(username=d['username'], defaults=d)
        if overwrite and not created:
            for key in d:
                setattr(api_user, key, d[key])
            api_user.save()
class InheritanceManager(ModelUtilsInheritanceManager):
def get_slug(self, slug, request=None):
        '''
        Return the resource identified by its slug.
        Apply request-based access control if a request is given.
        '''
resource = self.get_subclass(slug=slug)
if request and not resource.is_accessible_by(request):
raise PermissionDenied
return resource
def filter_apiuser(self, apiuser):
'''
Returns all resources accessible by apiuser
'''
return self.filter(Q(users=None) | Q(users=apiuser))
@six.python_2_unicode_compatible
class BaseResource(models.Model):
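    '''Abstract base class for connector resources.

    Concrete connectors inherit from this model; it provides endpoint
    discovery, per-user access control, logging, availability checks,
    JSON import/export and asynchronous jobs.
    '''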
title = models.CharField(max_length=50, verbose_name=_('Title'))
slug = models.SlugField(verbose_name=_('Identifier'), unique=True)
description = models.TextField(verbose_name=_('Description'))
users = models.ManyToManyField(ApiUser, blank=True, related_name='+', related_query_name='+')
objects = InheritanceManager()
parameters = None
manager_view_template_name = None
form_base_class = GenericConnectorForm
# permission descriptions
_can_access_description = _('Access is limited to the following API users:')
# category ordering for display
_category_ordering = []
class Meta:
abstract = True
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
self.logger = ProxyLogger(connector=self, transaction_id=str(uuid.uuid4()))
def __str__(self):
return self.title
def get_css_class_name(self):
category = self.category if not hasattr(self.category, '_proxy____args') else self.category._proxy____args[0]
return "%s %s" % (slugify(category), self._meta.model_name)
def is_accessible_by(self, request):
if request.user.is_superuser:
return True
restricted = self.users.all()
return not restricted or request.apiuser in restricted
@classmethod
def is_enabled(cls):
return getattr(settings, 'PASSERELLE_APP_%s_ENABLED' % cls._meta.app_label.upper(), True)
@property
def requests(self):
if getattr(self, '_requests', None) is None:
self._requests = passerelle.utils.Request(resource=self, logger=self.logger)
return self._requests
@property
def logging_parameters(self):
resource_type = ContentType.objects.get_for_model(self)
try:
return LoggingParameters.objects.get(
resource_type=resource_type,
resource_pk=self.id)
except LoggingParameters.DoesNotExist:
return LoggingParameters(
resource_type=resource_type,
resource_pk=self.id)
@property
def log_level(self):
return self.logging_parameters.log_level
def set_log_level(self, value):
parameters = self.logging_parameters
parameters.log_level = value
parameters.save()
@property
def availability_parameters(self):
resource_type = ContentType.objects.get_for_model(self)
try:
return AvailabilityParameters.objects.get(
resource_type=resource_type,
resource_pk=self.id)
except AvailabilityParameters.DoesNotExist:
return AvailabilityParameters(
resource_type=resource_type,
resource_pk=self.id)
def soap_client(self, **kwargs):
return passerelle.utils.soap.SOAPClient(resource=self, **kwargs)
@classmethod
def get_verbose_name(cls):
return cls._meta.verbose_name
@classmethod
def get_connector_slug(cls):
return cls._meta.app_label.replace('_', '-')
def get_absolute_url(self):
return reverse('view-connector',
kwargs={'connector': self.get_connector_slug(), 'slug': self.slug})
@classmethod
def get_add_url(cls):
return reverse('create-connector', kwargs={'connector': cls.get_connector_slug()})
def get_edit_url(self):
return reverse('edit-connector',
kwargs={'connector': self.get_connector_slug(), 'slug': self.slug})
def get_delete_url(self):
return reverse('delete-connector',
kwargs={'connector': self.get_connector_slug(), 'slug': self.slug})
def get_description_fields(self):
fields = []
for field in self._meta.fields:
if (field.name.endswith(('key', 'password', 'secret', 'keystore', 'token')) or
field.name in ('id', 'title', 'slug', 'description',
'log_level', 'users', 'client_certificate')):
continue
if hasattr(self, 'get_%s_display' % field.name):
value = getattr(self, 'get_%s_display' % field.name)()
else:
value = getattr(self, field.name, None)
if isinstance(field, models.URLField) and value:
# hide http authentication part
value = re.sub(r'://([^/]*:[^/]*?)@', '://***:***@', value)
fields.append((field, value))
return fields
def get_endpoints_infos(self):
endpoints = []
for name, method in inspect.getmembers(self, predicate=inspect.ismethod):
if hasattr(method, 'endpoint_info'):
method.endpoint_info.object = self
endpoint_name = method.endpoint_info.name
if endpoint_name == 'up' and hasattr(self.check_status, 'not_implemented'):
# hide automatic up endpoint if check_status method is not implemented
continue
for http_method in method.endpoint_info.methods:
# duplicate information to give each method its own entry
endpoint_info = copy.copy(method.endpoint_info)
endpoint_info.http_method = http_method
endpoints.append(endpoint_info)
endpoints.sort(key=lambda x: (x.display_category_order, x.display_category, x.display_order or 99999999, x.name or '', x.pattern or ''))
if hasattr(self, 'queries'):
self.append_custom_queries(endpoints)
return endpoints
def append_custom_queries(self, endpoints):
for query in self.queries.all():
if hasattr(query, 'as_endpoint'):
endpoints.append(query.as_endpoint())
def get_connector_permissions(self):
perms = {}
for endpoint_info in self.get_endpoints_infos():
permission = endpoint_info.perm
if permission:
perms[permission] = getattr(self,
'_%s_description' % permission,
_('Access (%s) is limited to the following API users:') % permission)
return [{'key': x[0], 'label': x[1]} for x in perms.items()]
def get_availability_status(self):
resource_type = ContentType.objects.get_for_model(self)
current_status = ResourceStatus.objects.filter(
resource_type=resource_type,
resource_pk=self.pk).first()
return current_status
def down(self):
status = self.get_availability_status()
return (status and status.down())
@endpoint(description=_('Check service availability'), display_order=-1)
def up(self, request, **kwargs):
if self.down():
raise APIError('service not available')
return {'err': 0}
def export_json(self):
d = {
'@type': 'passerelle-resource',
'resource_type': '%s.%s' % (self.__class__._meta.app_label,
self.__class__._meta.model_name),
'title': self.title,
'slug': self.slug,
'description': self.description,
'log_level': self.log_level,
'access_rights': []
}
resource_type = ContentType.objects.get_for_model(self)
for ar in AccessRight.objects.filter(resource_type=resource_type,
resource_pk=self.pk).select_related():
d['access_rights'].append({
'codename': ar.codename,
'apiuser': ar.apiuser.username,
})
concrete_fields = [
f for f in self.__class__._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
for field in concrete_fields:
if field.name == 'id':
continue
value = getattr(self, field.attname)
if isinstance(field, BASE_EXPORT_FIELDS):
d[field.name] = value
elif isinstance(field, models.FileField):
if value:
d[field.name] = {
'name': os.path.basename(value.name),
'content': force_text(base64.b64encode(value.read())),
}
else:
d[field.name] = None
else:
                raise Exception('export_json: field %s of resource class %s is unsupported' % (
                    field, self.__class__))
return d
@staticmethod
def import_json(d, import_users=False, overwrite=False):
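        '''Create or update a connector resource from an export_json() dict.

        An existing resource (matched by slug) is only modified when overwrite
        is True; access rights are restored only when import_users is True.
        '''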
if d.get('@type') != 'passerelle-resource':
raise ValueError('not a passerelle resource export')
d = d.copy()
d.pop('@type')
app_label, model_name = d['resource_type'].split('.')
model = apps.get_model(app_label, model_name)
try:
instance = model.objects.get(slug=d['slug'])
if not overwrite:
return
except model.DoesNotExist:
instance = None
with transaction.atomic():
            # prevent partial creation of resources
instance = model.import_json_real(overwrite, instance, d)
resource_type = ContentType.objects.get_for_model(instance)
# We can only connect AccessRight objects to the new Resource after its creation
if import_users:
for ar in d['access_rights']:
apiuser = ApiUser.objects.get(username=ar['apiuser'])
AccessRight.objects.get_or_create(
codename=ar['codename'],
resource_type=resource_type,
resource_pk=instance.pk,
apiuser=apiuser)
return instance
@classmethod
def import_json_real(cls, overwrite, instance, d, **kwargs):
init_kwargs = {
'title': d['title'],
'slug': d['slug'],
'description': d['description'],
}
init_kwargs.update(kwargs)
if instance:
for key in init_kwargs:
setattr(instance, key, init_kwargs[key])
else:
instance = cls(**init_kwargs)
concrete_fields = [
f for f in cls._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
for field in concrete_fields:
if field.name == 'id':
continue
value = d[field.name]
if isinstance(field, BASE_EXPORT_FIELDS):
setattr(instance, field.attname, value)
elif isinstance(field, models.FileField):
if value:
getattr(instance, field.attname).save(
value['name'],
ContentFile(base64.b64decode(value['content'])),
save=False)
else:
                raise Exception('import_json_real: field %s of resource class '
                                '%s is unsupported' % (field, cls))
instance.save()
if 'log_level' in d:
instance.set_log_level(d['log_level'])
return instance
def clean_logs(self):
# clean logs
timestamp = timezone.now() - datetime.timedelta(days=settings.LOG_RETENTION_DAYS)
ResourceLog.objects.filter(
appname=self.get_connector_slug(),
slug=self.slug,
timestamp__lt=timestamp).delete()
def check_status(self):
# should raise an exception if status is not ok
raise NotImplementedError
check_status.not_implemented = True
def availability(self):
# "availability" cron job to update service statuses
# eventually skip it
if not self.availability_parameters.run_check:
return
availability_parameters = self.availability_parameters
try:
self.check_status()
status = 'up'
message = ''
except NotImplementedError:
return
except Exception as e:
from passerelle.utils.conversion import exception_to_text
status = 'down'
message = exception_to_text(e)[:500]
resource_type = ContentType.objects.get_for_model(self)
current_status = ResourceStatus.objects.filter(
resource_type=resource_type,
resource_pk=self.pk).first()
if not current_status or status != current_status.status:
if status == 'down' and not self.down(): # new downtime
if availability_parameters.has_zero_delay():
self.logger.error(u'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message)
else:
self.logger.warning(u'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message)
ResourceStatus(
resource_type=resource_type,
resource_pk=self.pk,
status=status,
message=message).save()
if status == 'up' and current_status:
self.logger.info(u'connector "%s" (%s) is back up', self, self.__class__.__name__)
elif status == 'down':
# check last_notification_downtime and current downtime to see if it matches a new notification delay
last_notification_timestamp = current_status.last_notification_timestamp or current_status.start_timestamp
current_time = now()
downtime = (current_time - current_status.start_timestamp).total_seconds() // 60
last_notification_downtime = (
last_notification_timestamp - current_status.start_timestamp).total_seconds() // 60
for delay in availability_parameters.notification_delays_generator():
if not delay:
continue
if downtime >= delay:
if last_notification_downtime < delay:
days = downtime // 1440
hours = downtime // 60
if days > 1:
human_duration = 'for %d days' % days
elif hours > 1:
human_duration = 'for %d hours' % hours
else:
human_duration = 'for %d minutes' % downtime
self.logger.error(u'connector "%s" (%s) has been down %s: %s',
self, self.__class__.__name__,
human_duration,
message,
# when connector is down, logging is shutdown
force=True)
ResourceStatus.objects.filter(pk=current_status.pk).update(
message=message, last_notification_timestamp=current_time)
break
else:
ResourceStatus.objects.filter(pk=current_status.pk).update(
message=message)
break
else:
ResourceStatus.objects.filter(pk=current_status.pk).update(
message=message)
def hourly(self):
pass
def daily(self):
self.clean_logs()
def weekly(self):
pass
def monthly(self):
pass
def jobs_set(self):
resource_type = ContentType.objects.get_for_model(self)
return Job.objects.filter(
resource_type=resource_type,
resource_pk=self.pk)
def jobs(self):
# "jobs" cron job to run asynchronous tasks
if self.down():
# don't try running jobs if connector is known to be down.
return
skip_locked = {'skip_locked': True}
if not connection.features.has_select_for_update_skip_locked:
skip_locked = {}
skipped_jobs = []
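        # jobs are locked one by one with SELECT ... FOR UPDATE (using SKIP
        # LOCKED when the database supports it) so that concurrent workers do
        # not pick up the same job twice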
while True:
with transaction.atomic():
# lock an immediately runnable job
job = self.jobs_set().exclude(
pk__in=skipped_jobs
).filter(
Q(after_timestamp__isnull=True) | Q(after_timestamp__lt=timezone.now()),
status='registered'
).select_for_update(**skip_locked
).order_by('pk')[:1].first()
if not job:
break
job.status = 'running'
job.save()
# release lock
try:
getattr(self, job.method_name)(**job.parameters)
except SkipJob as e:
job.status = 'registered'
job.set_after_timestamp(e.after_timestamp)
skipped_jobs.append(job.id)
            except Exception:
self.handle_job_error(job, sys.exc_info())
else:
job.status = 'completed'
job.done_timestamp = timezone.now()
job.save()
def add_job(self, method_name, natural_id=None, after_timestamp=None, **kwargs):
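        '''Register an asynchronous job calling self.<method_name>(**kwargs).

        after_timestamp may be a datetime, a delay in seconds or a timedelta
        (see Job.set_after_timestamp). A hypothetical example:

            self.add_job('fetch_remote_data', document_id='42', after_timestamp=300)

        would make the "jobs" cron run self.fetch_remote_data(document_id='42')
        no earlier than five minutes from now.
        '''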
resource_type = ContentType.objects.get_for_model(self)
job = Job(resource_type=resource_type,
resource_pk=self.pk,
method_name=method_name,
natural_id=natural_id,
parameters=kwargs)
job.set_after_timestamp(after_timestamp)
job.save()
return job
def handle_job_error(self, job, exc_info):
from passerelle.utils.conversion import exception_to_text
(exc_type, exc_value, tb) = exc_info
job.status = 'failed'
job.done_timestamp = timezone.now()
job.status_details = {
'error_summary': '\n'.join(traceback.format_exception_only(exc_type, exc_value)).strip(),
}
self.logger.error('error running %s job (%s)',
job.method_name,
exception_to_text(exc_value),
exc_info=exc_info)
@six.python_2_unicode_compatible
class AccessRight(models.Model):
codename = models.CharField(max_length=100, verbose_name='codename')
resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
resource_pk = models.PositiveIntegerField()
resource = fields.GenericForeignKey('resource_type', 'resource_pk')
apiuser = models.ForeignKey(ApiUser, verbose_name=_('API User'), on_delete=models.CASCADE)
class Meta:
permissions = (
('view_accessright', 'Can view access right'),
)
unique_together = (
('codename', 'resource_type', 'resource_pk', 'apiuser'),
)
def __str__(self):
return '%s (on %s <%s>) (for %s)' % (self.codename, self.resource_type, self.resource_pk, self.apiuser)
class LoggingParameters(models.Model):
resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
resource_pk = models.PositiveIntegerField()
resource = fields.GenericForeignKey('resource_type', 'resource_pk')
log_level = models.CharField(
verbose_name=_('Log level'),
max_length=10,
        choices=(
('DEBUG', _('Debug')),
('INFO', _('Info')),
('WARNING', _('Warning')),
('ERROR', _('Error')),
('CRITICAL', _('Critical')),
),
default='INFO'
)
trace_emails = models.TextField(
verbose_name=_('Emails to receive error and critical traces'),
help_text=_('One address per line (empty for site administrators)'),
blank=True
)
requests_max_size = models.PositiveIntegerField(
verbose_name=_('Requests maximum size'),
help_text=_('Maximum HTTP request size to log'),
default=settings.LOGGED_REQUESTS_MAX_SIZE
)
responses_max_size = models.PositiveIntegerField(
verbose_name=_('Responses maximum size'),
        help_text=_('Maximum HTTP response size to log'),
default=settings.LOGGED_RESPONSES_MAX_SIZE
)
class Meta:
unique_together = (('resource_type', 'resource_pk'))
def parse_notification_delays(value):
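    '''Parse a comma-separated list of delays, in minutes.

    Delays must be non-negative, strictly increasing integers.
    '''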
delays = [int(v.strip()) for v in value.split(',')]
if not all(delay >= 0 for delay in delays):
raise ValueError
if delays != sorted(delays):
raise ValueError
if len(set(delays)) != len(delays):
raise ValueError
return delays
def validate_notification_delays(value):
try:
if not value:
raise ValueError
parse_notification_delays(value)
except ValueError:
        raise ValidationError(_('You must provide a list of increasing delays, in minutes'))
class AvailabilityParameters(models.Model):
resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
resource_pk = models.PositiveIntegerField()
resource = fields.GenericForeignKey('resource_type', 'resource_pk')
run_check = models.BooleanField(
default=True, verbose_name=_('Run regular availability checks'),
help_text=_('Run an availability check every 5 minutes'))
notification_delays = models.TextField(
verbose_name=_('Notification delays'),
default='0',
blank=False,
validators=[validate_notification_delays],
        help_text=_('Increasing delays between error notifications, in minutes, e.g.: 0,5,10'))
def has_zero_delay(self):
return 0 in parse_notification_delays(self.notification_delays)
def notification_delays_generator(self):
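        '''Yield the configured notification delays, in minutes.

        Once the configured delays are exhausted, keep yielding further
        multiples of the last delay; e.g. "0,5,10" yields 0, 5, 10, 20, 30, ...
        '''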
notification_delays = parse_notification_delays(self.notification_delays)
last_notification_delay = notification_delays[-1]
if last_notification_delay > 1:
notification_delays = itertools.chain(
notification_delays,
itertools.count(2 * last_notification_delay, last_notification_delay))
return notification_delays
class Meta:
unique_together = (('resource_type', 'resource_pk'))
class SkipJob(Exception):
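    '''Raised by a job method to postpone its execution.

    The job is put back in the "registered" state and retried once
    after_timestamp (if any) has passed.
    '''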
def __init__(self, after_timestamp=None):
self.after_timestamp = after_timestamp
super(SkipJob, self).__init__()
class Job(models.Model):
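    '''Asynchronous task attached to a connector resource.

    Jobs are created with BaseResource.add_job() and executed by the "jobs"
    cron through BaseResource.jobs().
    '''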
resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
resource_pk = models.PositiveIntegerField()
resource = fields.GenericForeignKey('resource_type', 'resource_pk')
method_name = models.CharField(max_length=50)
natural_id = models.CharField(max_length=256, blank=True, null=True)
    parameters = JSONField(default=dict)
creation_timestamp = models.DateTimeField(auto_now_add=True)
update_timestamp = models.DateTimeField(auto_now=True)
done_timestamp = models.DateTimeField(null=True)
after_timestamp = models.DateTimeField(null=True)
status = models.CharField(
max_length=20,
default='registered',
choices=(('registered', _('Registered')),
('running', _('Running')),
('failed', _('Failed')),
('completed', _('Completed'))
),
)
    status_details = JSONField(default=dict)
def set_after_timestamp(self, value):
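        '''Accept a datetime, a delay in seconds (int or float) or a timedelta.'''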
if isinstance(value, datetime.datetime):
self.after_timestamp = value
elif isinstance(value, six.integer_types + (float,)):
self.after_timestamp = timezone.now() + datetime.timedelta(seconds=value)
elif isinstance(value, datetime.timedelta):
self.after_timestamp = timezone.now() + value
else:
self.after_timestamp = value
@six.python_2_unicode_compatible
class ResourceLog(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
appname = models.CharField(max_length=128, verbose_name='appname', null=True)
slug = models.CharField(max_length=128, verbose_name='slug', null=True)
levelno = models.IntegerField(verbose_name='log level')
sourceip = models.GenericIPAddressField(blank=True, null=True, verbose_name=_('Source IP'))
message = models.TextField(verbose_name='message')
    extra = JSONField(verbose_name='extras', default=dict)
class Meta:
ordering = ('id',)
permissions = (
('view_resourcelog', 'Can view resource logs'),
)
@property
def level(self):
return slugify(logging.getLevelName(self.levelno))
def __str__(self):
return '%s %s %s %s' % (self.timestamp, self.levelno, self.appname, self.slug)
STATUS_CHOICES = (
('unknown', _('Unknown')),
('up', _('Up')),
('down', _('Down')),
)
class ResourceStatus(models.Model):
resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
resource_pk = models.PositiveIntegerField()
start_timestamp = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='unknown')
message = models.CharField(max_length=500, blank=True)
last_notification_timestamp = models.DateTimeField(blank=True, null=True)
class Meta:
ordering = ['-start_timestamp']
def up(self):
return self.status == 'up'
def down(self):
return self.status == 'down'
class ProxyLogger(object):
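    '''Logging proxy bound to a connector.

    Messages are sent both to the standard logging module and to ResourceLog
    database entries; credentials found in URLs are masked, and logging is
    muted (unless force=True) while the connector is known to be down.
    '''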
def __init__(self, connector, extra=None, transaction_id=None):
self.connector = connector
self.appname = connector.get_connector_slug()
self.slug = connector.slug
self.extra = extra or {}
logger_name = 'passerelle.resource.%s.%s' % (self.appname, self.slug)
self.transaction_id = transaction_id
self._logger = logging.getLogger(logger_name)
self._logger.setLevel(connector.log_level)
def context(self, **kwargs):
return self.__class__(self.connector, extra=dict(self.extra or {}, **kwargs))
@property
def level(self):
return self._logger.getEffectiveLevel()
def _log(self, levelname, message, *args, **kwargs):
force = kwargs.pop('force', False)
if self.connector.down() and not force:
# don't log if the connector is known to be down
return
# hide credentials in urls
for url in re.findall(r'(https?://\S+)', message):
try:
parsed = urlparse(url)
except Exception:
continue
if not parsed.username and not parsed.password:
continue
replaced = parsed._replace(netloc="{}:{}@{}".format('***', '***', parsed.hostname))
message = message.replace(url, replaced.geturl())
levelno = getattr(logging, levelname)
if self._logger.level <= levelno:
attr = {}
attr['levelno'] = levelno
rl_message = message
rl_args = args
# Borrowed from python stdlib logging/__init__.py
if (rl_args and len(rl_args) == 1 and isinstance(rl_args[0], collections.Mapping)
and rl_args[0]):
rl_args = rl_args[0]
# End Borrow
if rl_args:
rl_message = rl_message % rl_args
attr['message'] = rl_message
attr['appname'] = self.appname
attr['slug'] = self.slug
extra = (self.extra or {}).copy()
extra.update(kwargs.get('extra', {}))
request = extra.get('request')
def is_json_serializable(value):
return isinstance(value, (list, dict, bool) + six.integer_types + six.string_types)
attr['extra'] = {key: value for key, value in extra.items() if is_json_serializable(value)}
if self.transaction_id:
attr['extra']['transaction_id'] = self.transaction_id
if getattr(request, 'META', None):
if 'HTTP_X_FORWARDED_FOR' in request.META:
sourceip = request.META.get('HTTP_X_FORWARDED_FOR', '').split(",")[0].strip()
else:
sourceip = request.META.get('REMOTE_ADDR')
else:
sourceip = None
attr['sourceip'] = sourceip
if kwargs.get('exc_info'):
(exc_type, exc_value, tb) = sys.exc_info()
attr['extra']['error_summary'] = traceback.format_exception_only(exc_type, exc_value)
ResourceLog.objects.create(**attr)
admins = settings.ADMINS
logging_parameters = self.connector.logging_parameters
if logging_parameters.trace_emails:
admins = [('', x) for x in logging_parameters.trace_emails.splitlines()]
with override_settings(ADMINS=admins):
getattr(self._logger, levelname.lower())(message, *args, **kwargs)
def exception(self, message, *args, **kwargs):
kwargs['exc_info'] = 1
self._log('ERROR', message, *args, **kwargs)
def debug(self, message, *args, **kwargs):
self._log('DEBUG', message, *args, **kwargs)
def info(self, message, *args, **kwargs):
self._log('INFO', message, *args, **kwargs)
def warning(self, message, *args, **kwargs):
self._log('WARNING', message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
self._log('CRITICAL', message, *args, **kwargs)
def error(self, message, *args, **kwargs):
self._log('ERROR', message, *args, **kwargs)
def fatal(self, message, *args, **kwargs):
self._log('FATAL', message, *args, **kwargs)
class HTTPResource(models.Model):
'''Mixin to add basic TLS/Basic HTTP authentication fields to any
resource.'''
basic_auth_username = models.CharField(
max_length=128,
verbose_name=_('Basic authentication username'),
blank=True)
basic_auth_password = models.CharField(
max_length=128,
verbose_name=_('Basic authentication password'),
blank=True)
client_certificate = models.FileField(
verbose_name=_('TLS client certificate'),
null=True,
blank=True)
trusted_certificate_authorities = models.FileField(
verbose_name=_('TLS trusted CAs'),
null=True,
blank=True)
verify_cert = models.BooleanField(
verbose_name=_('TLS verify certificates'),
default=True,
blank=True)
http_proxy = models.CharField(
max_length=128,
verbose_name=_('HTTP and HTTPS proxy'),
blank=True)
class Meta:
abstract = True
class SMSResource(BaseResource):
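    '''Abstract base class for SMS provider connectors.

    Concrete providers are expected to implement send_msg(); the generic
    "send" endpoint validates the JSON payload, truncates the message to
    max_message_length, calls send_msg() and records an SMSLog entry.
    clean_numbers() is available to normalize destination numbers.
    '''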
category = _('SMS Providers')
documentation_url = 'https://doc-publik.entrouvert.com/admin-fonctionnel/les-tutos/configuration-envoi-sms/'
_can_send_messages_description = _('Sending messages is limited to the following API users:')
max_message_length = models.IntegerField(_('Maximum message length'), default=160)
@classmethod
def clean_numbers(cls, destinations, default_country_code='33',
default_trunk_prefix='0'): # Yeah France first !
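        '''Normalize phone numbers to international "00" notation.

        With the default country code and trunk prefix, both
        '+33 6 12 34 56 78' and '0612345678' become '0033612345678'.
        '''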
numbers = []
for dest in destinations:
            # most gateways need the number prefixed by the country code,
            # which is really unfortunate
dest = dest.strip()
number = ''.join(re.findall('[0-9]', dest))
if dest.startswith('+'):
number = '00' + number
            elif number.startswith('00'):
                # assume 00 is already the international access code, keep the number as-is
                pass
elif number.startswith(default_trunk_prefix):
number = '00' + default_country_code + number[len(default_trunk_prefix):]
else:
raise APIError('phone number %r is unsupported (no international prefix, '
'no local trunk prefix)' % number)
numbers.append(number)
return numbers
@endpoint(perm='can_send_messages', methods=['post'])
def send(self, request, *args, **kwargs):
try:
data = json_loads(request.body)
assert isinstance(data, dict), 'JSON payload is not a dict'
assert 'message' in data, 'missing "message" in JSON payload'
assert 'from' in data, 'missing "from" in JSON payload'
assert 'to' in data, 'missing "to" in JSON payload'
assert isinstance(data['message'], six.text_type), 'message is not a string'
assert isinstance(data['from'], six.text_type), 'from is not a string'
assert all(map(lambda x: isinstance(x, six.text_type), data['to'])), \
'to is not a list of strings'
except (ValueError, AssertionError) as e:
raise APIError('Payload error: %s' % e)
data['message'] = data['message'][:self.max_message_length]
logging.info('sending message %r to %r with sending number %r',
data['message'], data['to'], data['from'])
        stop = 'nostop' not in request.GET
result = {'data': self.send_msg(data['message'], data['from'], data['to'], stop=stop)}
SMSLog.objects.create(appname=self.get_connector_slug(), slug=self.slug)
return result
class Meta:
abstract = True
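# A concrete SMS provider is expected to subclass SMSResource and implement
# send_msg(); a minimal sketch (hypothetical provider, invented field and URL):
#
#     class SomeProviderSMSGateway(SMSResource):
#         api_url = models.URLField(_('API URL'))
#
#         class Meta:
#             verbose_name = 'Some provider'
#
#         def send_msg(self, text, sender, destinations, stop=True):
#             numbers = self.clean_numbers(destinations)
#             response = self.requests.post(self.api_url, json={
#                 'from': sender, 'to': numbers, 'message': text})
#             response.raise_for_status()
#             return response.json()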
@six.python_2_unicode_compatible
class SMSLog(models.Model):
timestamp = models.DateTimeField(auto_now_add=True)
appname = models.CharField(max_length=128, verbose_name='appname', null=True)
slug = models.CharField(max_length=128, verbose_name='slug', null=True)
def __str__(self):
return '%s %s %s' % (self.timestamp, self.appname, self.slug)
@six.python_2_unicode_compatible
class BaseQuery(models.Model):
    '''Base class for building custom queries.

    Subclasses must define a "resource" attribute as a ForeignKey to a
    BaseResource subclass, and will probably extend the "as_endpoint" method
    to document their parameters.
    '''
name = models.CharField(
verbose_name=_('Name'),
max_length=128)
slug = models.SlugField(
verbose_name=_('Slug'),
max_length=128)
description = models.TextField(
verbose_name=_('Description'),
blank=True)
http_method = 'get'
class Meta:
abstract = True
unique_together = [
('resource', 'name'),
('resource', 'slug'),
]
ordering = ['name']
verbose_name = _('Query')
def as_endpoint(self, path=''):
name = '%s/%s/' % (path, self.slug) if path else self.slug + '/'
e = endpoint(name=name, description=self.name, long_description=self.description)
e.http_method = self.http_method
e.object = self.resource
return e
def __str__(self):
return self.name
def export_json(self):
d = {}
fields = [
f for f in self.__class__._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
) and f.name not in ['id', 'resource']
]
for field in fields:
d[field.name] = getattr(self, field.name)
return d
@classmethod
def import_json(cls, d):
return cls(**d)
def delete_url(self):
return reverse(self.delete_view,
kwargs={'slug': self.resource.slug, 'pk': self.pk})
def edit_url(self):
return reverse(self.edit_view,
kwargs={'slug': self.resource.slug, 'pk': self.pk})