1155 lines
42 KiB
Python
1155 lines
42 KiB
Python
import base64
|
|
import collections.abc
|
|
import copy
|
|
import datetime
|
|
import inspect
|
|
import itertools
|
|
import logging
|
|
import os
|
|
import re
|
|
import sys
|
|
import traceback
|
|
import uuid
|
|
from contextlib import contextmanager
|
|
from urllib.parse import urlparse
|
|
|
|
from django.apps import apps
|
|
from django.conf import settings
|
|
from django.contrib.contenttypes import fields
|
|
from django.contrib.contenttypes.models import ContentType
|
|
from django.contrib.postgres.fields import ArrayField
|
|
from django.core.exceptions import PermissionDenied, ValidationError
|
|
from django.core.files.base import ContentFile
|
|
from django.db import connection, models, transaction
|
|
from django.db.models import JSONField, Q
|
|
from django.forms.models import modelform_factory
|
|
from django.forms.widgets import ClearableFileInput
|
|
from django.test import override_settings
|
|
from django.urls import reverse
|
|
from django.utils import timezone
|
|
from django.utils.encoding import force_str
|
|
from django.utils.log import AdminEmailHandler
|
|
from django.utils.text import slugify
|
|
from django.utils.timezone import now
|
|
from django.utils.translation import gettext_lazy as _
|
|
from model_utils.managers import InheritanceManager as ModelUtilsInheritanceManager
|
|
|
|
import passerelle
|
|
from passerelle.forms import GenericConnectorForm
|
|
from passerelle.utils import ImportSiteError
|
|
from passerelle.utils.api import endpoint
|
|
from passerelle.utils.jsonresponse import APIError
|
|
from passerelle.utils.sftp import SFTP, SFTPField
|
|
|
|
# (type code, human label) pairs for ApiUser.keytype: plain API key or
# HMAC signature key (empty keytype means open/anonymous access).
KEYTYPE_CHOICES = (
    ('API', _('API Key')),
    ('SIGN', _('HMAC Signature')),
)

# Choices for LoggingParameters.log_level; codes mirror the standard
# logging module level names so they can be passed to logging directly.
LOGLEVEL_CHOICES = (
    ('DEBUG', _('Debug')),
    ('INFO', _('Info')),
    ('WARNING', _('Warning')),
    ('ERROR', _('Error')),
    ('CRITICAL', _('Critical')),
)
|
|
|
|
# Model field classes whose values are serialized verbatim by
# BaseResource.export_json() / import_json_real(); other field types
# (FileField, SFTPField) get dedicated handling there.
# Fix: models.IntegerField was listed twice; the duplicate is removed
# (isinstance() semantics are unchanged).
BASE_EXPORT_FIELDS = (
    models.TextField,
    models.CharField,
    models.SlugField,
    models.URLField,
    models.BooleanField,
    models.IntegerField,
    models.CommaSeparatedIntegerField,
    models.EmailField,
    models.PositiveIntegerField,
    JSONField,
    models.FloatField,
    ArrayField,
)
|
|
|
|
|
|
class ApiUser(models.Model):
    """Identity and credentials of a client allowed to call connector APIs."""

    username = models.CharField(max_length=128, verbose_name=_('Username'), unique=True)
    fullname = models.CharField(max_length=50, verbose_name=_('Full Name'))
    description = models.TextField(blank=True, verbose_name=_('Description'))

    # authentication material; keytype may be blank for open access
    keytype = models.CharField(max_length=4, choices=KEYTYPE_CHOICES, blank=True, verbose_name=_('Key Type'))
    key = models.CharField(max_length=256, blank=True, verbose_name=_('Key'))

    # optional source-IP restriction
    ipsource = models.GenericIPAddressField(
        blank=True, null=True, unpack_ipv4=True, verbose_name=_('IP Address')
    )

    def __str__(self):
        return '%s <%s>' % (self.fullname, self.username)

    def clean(self):
        """Validate that a keytype comes with a key, and that HMAC keys are
        at least 256 bits long (counting 8 bits per key character)."""
        if self.keytype and not self.key:
            raise ValidationError(_('Key can not be empty for type %s.') % self.keytype)
        if self.keytype == 'SIGN' and len(self.key) * 8 < 256:
            raise ValidationError(_('Signature Key length must be at least 256 bits.'))

    def export_json(self):
        """Return a JSON-serializable dict describing this user."""
        return {
            '@type': 'passerelle-user',
            'username': self.username,
            'fullname': self.fullname,
            'description': self.description,
            'keytype': self.keytype,
            'key': self.key,
            'ipsource': self.ipsource,
        }

    @classmethod
    def import_json(cls, d, overwrite=False):
        """Create a user from export_json() data, matching on username.

        Existing users are only updated when overwrite is true.
        Raises ValueError when *d* is not a passerelle user export.
        """
        if d.get('@type') != 'passerelle-user':
            raise ValueError('not a passerelle user export')
        d = d.copy()
        d.pop('@type')
        api_user, created = cls.objects.get_or_create(username=d['username'], defaults=d)
        if overwrite and not created:
            for key in d:
                setattr(api_user, key, d[key])
            api_user.save()
|
|
|
|
|
|
class InheritanceManager(ModelUtilsInheritanceManager):
    """Manager resolving concrete BaseResource subclasses, with access helpers."""

    def get_slug(self, slug, request=None):
        """Return the resource identified by *slug*.

        When *request* is given, raise PermissionDenied unless the request
        is allowed to access the resource.
        """
        found = self.get_subclass(slug=slug)
        if request and not found.is_accessible_by(request):
            raise PermissionDenied
        return found

    def filter_apiuser(self, apiuser):
        """Return every resource *apiuser* may use: unrestricted ones plus
        those explicitly granted to it."""
        open_to_all = Q(users=None)
        granted = Q(users=apiuser)
        return self.filter(open_to_all | granted)
|
|
|
|
|
|
class BaseResource(models.Model):
    """Abstract base model for all connector resources.

    Concrete connectors subclass this; each row is one configured connector
    instance, identified by its slug, with per-ApiUser access control.
    """

    title = models.CharField(max_length=50, verbose_name=_('Title'))
    slug = models.SlugField(verbose_name=_('Identifier'), unique=True)
    description = models.TextField(verbose_name=_('Description'))
    # an empty set means the resource is accessible to every API user
    users = models.ManyToManyField(ApiUser, blank=True, related_name='+', related_query_name='+')
    objects = InheritanceManager()

    # class-level knobs concrete connectors may override
    parameters = None
    manager_view_template_name = None
    manager_form_base_class = GenericConnectorForm
    hide_description_fields = []

    # permission descriptions
    _can_access_description = _('Access is limited to the following API users:')

    # category ordering for display
    _category_ordering = []

    class Meta:
        abstract = True

    class UnknownBaseResourceError(LookupError):
        # raised by import_json() when the exported resource type is not installed
        pass
|
|
|
|
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # per-instance logger; a fresh transaction id groups related log lines
        self.logger = ProxyLogger(connector=self, transaction_id=str(uuid.uuid4()))

    def __str__(self):
        return self.title

    def get_css_class_name(self):
        """Return CSS classes ("<category-slug> <model-name>") for listings."""
        # self.category may be a lazy translation proxy; unwrap the original
        # (untranslated) string so slugify() yields a stable class name
        category = (
            self.category if not hasattr(self.category, '_proxy____args') else self.category._proxy____args[0]
        )
        return "%s %s" % (slugify(category), self._meta.model_name)
|
|
|
|
def is_accessible_by(self, request):
|
|
if request.user.is_superuser:
|
|
return True
|
|
restricted = self.users.all()
|
|
return not restricted or request.apiuser in restricted
|
|
|
|
    @classmethod
    def is_enabled(cls):
        # connectors are enabled by default; setting
        # PASSERELLE_APP_<APP>_ENABLED = False disables one
        return getattr(settings, 'PASSERELLE_APP_%s_ENABLED' % cls._meta.app_label.upper(), True)

    @classmethod
    def is_legacy(cls):
        # PASSERELLE_APP_<APP>_LEGACY = True flags a connector as legacy
        return getattr(settings, 'PASSERELLE_APP_%s_LEGACY' % cls._meta.app_label.upper(), False)
|
|
|
|
    @classmethod
    def get_manager_form_class(cls, **kwargs):
        """
        Return the class to use for new/edit connector forms.
        """
        form_class = modelform_factory(cls, form=cls.manager_form_base_class, **kwargs)
        for field in form_class.base_fields.values():
            if isinstance(field.widget, ClearableFileInput):
                # show the current file and the clear checkbox inline with the input
                field.widget.template_with_initial = (
                    '%(initial_text)s: %(initial)s %(clear_template)s<br />%(input_text)s: %(input)s'
                )
        return form_class
|
|
|
|
    @property
    def requests(self):
        # lazily built, memoized HTTP client bound to this resource
        if getattr(self, '_requests', None) is None:
            self._requests = self.make_requests()
        return self._requests

    def make_requests(self, **kwargs):
        """Build a passerelle Request helper bound to this resource and its logger."""
        init_kwargs = {'resource': self, 'logger': self.logger}
        init_kwargs.update(kwargs)
        return passerelle.utils.Request(**init_kwargs)
|
|
|
|
    @property
    def logging_parameters(self):
        """Return this resource's LoggingParameters row, or an unsaved
        instance carrying the field defaults when none exists yet."""
        resource_type = ContentType.objects.get_for_model(self)
        try:
            return LoggingParameters.objects.get(resource_type=resource_type, resource_pk=self.id)
        except LoggingParameters.DoesNotExist:
            return LoggingParameters(resource_type=resource_type, resource_pk=self.id)

    @property
    def log_level(self):
        return self.logging_parameters.log_level

    def set_log_level(self, value):
        # persists the level (creating the parameters row if needed)
        parameters = self.logging_parameters
        parameters.log_level = value
        parameters.save()
|
|
|
|
    @property
    def availability_parameters(self):
        """Return this resource's AvailabilityParameters row, or an unsaved
        instance carrying the field defaults when none exists yet."""
        resource_type = ContentType.objects.get_for_model(self)
        try:
            return AvailabilityParameters.objects.get(resource_type=resource_type, resource_pk=self.id)
        except AvailabilityParameters.DoesNotExist:
            return AvailabilityParameters(resource_type=resource_type, resource_pk=self.id)

    def soap_client(self, **kwargs):
        """Build a SOAP client bound to this resource."""
        return passerelle.utils.soap.SOAPClient(resource=self, **kwargs)
|
|
|
|
    @classmethod
    def get_verbose_name(cls):
        return cls._meta.verbose_name

    @classmethod
    def get_connector_slug(cls):
        # URL identifier of the connector kind, derived from the app label
        return cls._meta.app_label.replace('_', '-')

    def get_absolute_url(self):
        return reverse('view-connector', kwargs={'connector': self.get_connector_slug(), 'slug': self.slug})

    @classmethod
    def get_add_url(cls):
        return reverse('create-connector', kwargs={'connector': cls.get_connector_slug()})

    def get_edit_url(self):
        return reverse('edit-connector', kwargs={'connector': self.get_connector_slug(), 'slug': self.slug})

    def get_delete_url(self):
        return reverse('delete-connector', kwargs={'connector': self.get_connector_slug(), 'slug': self.slug})
|
|
|
|
    def get_description_fields(self):
        """Return (field, display value) pairs for the connector detail page.

        Secret-looking fields (keys, passwords, tokens...) and explicitly
        hidden ones are skipped; credentials embedded in URL values are
        masked.
        """
        fields = []
        hide_fields = ['id', 'title', 'slug', 'description', 'log_level', 'users', 'client_certificate']
        hide_fields.extend(self.hide_description_fields)
        for field in self._meta.fields:
            if (
                field.name.endswith(("key", "password", "secret", "keystore", "token", "username"))
                or field.name in hide_fields
            ):
                continue
            # prefer the human-readable value for fields with choices
            if hasattr(self, 'get_%s_display' % field.name):
                value = getattr(self, 'get_%s_display' % field.name)()
            else:
                value = getattr(self, field.name, None)
            if isinstance(field, models.URLField) and value:
                # hide http authentication part
                value = re.sub(r'://([^/]*:[^/]*?)@', '://***:***@', value)
            elif isinstance(value, list):
                value = ', '.join(value)
            fields.append((field, value))
        return fields
|
|
|
|
    def get_endpoints_infos(self):
        """Collect endpoint descriptions from @endpoint-decorated methods.

        Returns one entry per (endpoint, HTTP method) pair, sorted for
        display, plus custom-query endpoints when the connector has a
        'queries' relation.
        """
        endpoints = []
        for dummy, method in inspect.getmembers(type(self), predicate=inspect.isfunction):
            if hasattr(method, 'endpoint_info'):
                method.endpoint_info.object = self
                endpoint_name = method.endpoint_info.name
                if endpoint_name == 'up' and hasattr(self.check_status, 'not_implemented'):
                    # hide automatic up endpoint if check_status method is not implemented
                    continue
                for http_method in method.endpoint_info.methods:
                    # duplicate information to give each method its own entry
                    endpoint_info = copy.copy(method.endpoint_info)
                    endpoint_info.http_method = http_method
                    endpoints.append(endpoint_info)
        endpoints.sort(
            key=lambda x: (
                x.display_category_order,
                x.display_category,
                x.display_order or 99999999,
                x.name or '',
                x.pattern or '',
            )
        )
        if hasattr(self, 'queries'):
            self.append_custom_queries(endpoints)
        return endpoints
|
|
|
|
def append_custom_queries(self, endpoints):
|
|
for query in self.queries.all():
|
|
if hasattr(query, 'as_endpoint'):
|
|
endpoints.append(query.as_endpoint())
|
|
|
|
def get_connector_permissions(self):
|
|
perms = {}
|
|
for endpoint_info in self.get_endpoints_infos():
|
|
permission = endpoint_info.perm
|
|
if permission:
|
|
perms[permission] = getattr(
|
|
self,
|
|
'_%s_description' % permission,
|
|
_('Access (%s) is limited to the following API users:') % permission,
|
|
)
|
|
return [{'key': x[0], 'label': x[1]} for x in perms.items()]
|
|
|
|
    def get_availability_status(self):
        """Return the latest ResourceStatus for this resource, or None when
        no availability check has ever been recorded."""
        resource_type = ContentType.objects.get_for_model(self)
        current_status = ResourceStatus.objects.filter(
            resource_type=resource_type, resource_pk=self.pk
        ).first()
        return current_status

    def down(self):
        """Tell whether the resource is currently known to be down.

        Returns None (falsy) when no status was ever recorded.
        """
        status = self.get_availability_status()
        return status and status.down()
|
|
|
|
    @endpoint(description=_('Check service availability'), perm='OPEN', display_order=-1)
    def up(self, request, **kwargs):
        """Automatic availability endpoint; errors out when the connector is
        known to be down (as recorded by the availability cron)."""
        if self.down():
            raise APIError('service not available')
        return {'err': 0}
|
|
|
|
    def export_json(self):
        """Return a JSON-serializable dict describing this resource.

        Includes base metadata, access rights, and every concrete editable
        field: simple fields verbatim, FileField contents base64-encoded,
        SFTPField via its __json__() form. Raises for unsupported field
        types so new field kinds fail loudly instead of being dropped.
        """
        d = {
            '@type': 'passerelle-resource',
            'resource_type': '%s.%s' % (self.__class__._meta.app_label, self.__class__._meta.model_name),
            'title': self.title,
            'slug': self.slug,
            'description': self.description,
            'log_level': self.log_level,
            'access_rights': [],
        }
        resource_type = ContentType.objects.get_for_model(self)
        for ar in AccessRight.objects.filter(
            resource_type=resource_type, resource_pk=self.pk
        ).select_related():
            d['access_rights'].append(
                {
                    'codename': ar.codename,
                    'apiuser': ar.apiuser.username,
                }
            )
        # concrete editable fields, excluding reverse/many-to-many relations
        concrete_fields = [
            f
            for f in self.__class__._meta.get_fields()
            if f.concrete
            and f.editable
            and (not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model))
        ]
        for field in concrete_fields:
            if field.name == 'id':
                continue
            value = getattr(self, field.attname)
            if isinstance(field, BASE_EXPORT_FIELDS):
                d[field.name] = value
            elif isinstance(field, models.FileField):
                if value.name:
                    with value as fd:
                        d[field.name] = {
                            'name': os.path.basename(value.name),
                            'content': force_str(base64.b64encode(fd.read())),
                        }
                else:
                    d[field.name] = None
            elif isinstance(field, SFTPField):
                d[field.name] = value and value.__json__()
            else:
                raise Exception(
                    'export_json: field %s of ressource class %s is unsupported' % (field, self.__class__)
                )
        return d
|
|
|
|
    @staticmethod
    def import_json(d, import_users=False, overwrite=False):
        """Create or update a resource from export_json() data.

        Resolves the concrete model from d['resource_type'], matches an
        existing instance on slug (updated only with overwrite=True), and
        optionally re-attaches access rights to existing ApiUsers.

        Raises ValueError on wrong '@type', UnknownBaseResourceError when
        the connector app is not installed, ImportSiteError on a missing
        API user. Returns the instance (None when skipped).
        """
        if d.get('@type') != 'passerelle-resource':
            raise ValueError('not a passerelle resource export')

        d = d.copy()
        d.pop('@type')
        app_label, model_name = d['resource_type'].split('.')
        try:
            model = apps.get_model(app_label, model_name)
        except LookupError:
            raise BaseResource.UnknownBaseResourceError(app_label)
        try:
            instance = model.objects.get(slug=d['slug'])
            if not overwrite:
                return
        except model.DoesNotExist:
            instance = None
        with transaction.atomic():
            # prevent semi-creation of ressources
            instance = model.import_json_real(overwrite, instance, d)
            resource_type = ContentType.objects.get_for_model(instance)
            # We can only connect AccessRight objects to the new Resource after its creation
            if import_users:
                for ar in d['access_rights']:
                    try:
                        apiuser = ApiUser.objects.get(username=ar['apiuser'])
                    except ApiUser.DoesNotExist:
                        raise ImportSiteError(_('Api user "%s" not found') % ar['apiuser'])
                    AccessRight.objects.get_or_create(
                        codename=ar['codename'],
                        resource_type=resource_type,
                        resource_pk=instance.pk,
                        apiuser=apiuser,
                    )
        return instance
|
|
|
|
    @classmethod
    def import_json_real(cls, overwrite, instance, d, **kwargs):
        """Apply exported data *d* onto *instance* (or a new one) and save it.

        Mirrors export_json(): simple fields assigned directly, FileField
        contents decoded from base64, SFTPField rebuilt from its dict form.
        Raises for unsupported field types. Returns the saved instance.
        """
        init_kwargs = {
            'title': d['title'],
            'slug': d['slug'],
            'description': d['description'],
        }
        init_kwargs.update(kwargs)
        if instance:
            for key, value in init_kwargs.items():
                setattr(instance, key, value)
        else:
            instance = cls(**init_kwargs)
        # concrete editable fields, excluding reverse/many-to-many relations
        concrete_fields = [
            f
            for f in cls._meta.get_fields()
            if f.concrete
            and f.editable
            and (not f.is_relation or f.one_to_one or (f.many_to_one and f.related_model))
        ]
        for field in concrete_fields:
            if field.name == 'id':
                continue
            value = d[field.name]
            if isinstance(field, BASE_EXPORT_FIELDS):
                setattr(instance, field.attname, value)
            elif isinstance(field, models.FileField):
                if value:
                    # save=False: the instance itself is saved once at the end
                    getattr(instance, field.attname).save(
                        value['name'], ContentFile(base64.b64decode(value['content'])), save=False
                    )
            elif isinstance(field, SFTPField):
                if value:
                    value = SFTP(**value)
                setattr(instance, field.attname, value)
            else:
                raise Exception(
                    'import_json_real: field %s of ressource class ' '%s is unsupported' % (field, cls)
                )
        instance.save()
        if 'log_level' in d:
            instance.set_log_level(d['log_level'])
        return instance
|
|
|
|
def clean_logs(self):
|
|
# clean logs
|
|
timestamp = timezone.now() - datetime.timedelta(
|
|
days=self.logging_parameters.log_retention_days or settings.LOG_RETENTION_DAYS
|
|
)
|
|
ResourceLog.objects.filter(
|
|
appname=self.get_connector_slug(), slug=self.slug, timestamp__lt=timestamp
|
|
).delete()
|
|
|
|
    def check_status(self):
        # should raise an exception if status is not ok
        raise NotImplementedError

    # marker inspected by get_endpoints_infos() and availability() to detect
    # connectors that do not override check_status()
    check_status.not_implemented = True
|
|
|
|
    def availability(self):
        """Availability cron entry point: run check_status() and record the
        resulting up/down state (unless checks are disabled)."""
        # "availability" cron job to update service statuses

        # eventually skip it
        if not self.availability_parameters.run_check:
            return

        try:
            # admin emails are silenced: a failing check is recorded, not mailed
            with self.logger.disable_admin_emails():
                self.check_status()
        except NotImplementedError:
            # connector has no health check; leave status untouched
            return
        except Exception as e:
            from passerelle.utils.conversion import exception_to_text

            # status message column is limited to 500 characters
            self.set_availability_status('down', message=exception_to_text(e)[:500])
        else:
            self.set_availability_status('up')
|
|
|
|
    def set_availability_status(self, status, message=''):
        """Record *status* ('up'/'down') for this resource and emit log
        notifications.

        On a state change a new ResourceStatus row is created; a new downtime
        is logged at ERROR when the first notification delay is 0, WARNING
        otherwise, and a recovery is logged at INFO. While a downtime
        persists, repeated ERROR notifications are emitted each time the
        downtime crosses one of the configured notification delays.
        """
        availability_parameters = self.availability_parameters
        resource_type = ContentType.objects.get_for_model(self)
        current_status = ResourceStatus.objects.filter(
            resource_type=resource_type, resource_pk=self.pk
        ).first()
        if not current_status or status != current_status.status:
            if status == 'down' and not self.down():  # new downtime
                if availability_parameters.has_zero_delay():
                    self.logger.error(
                        'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message
                    )
                else:
                    self.logger.warning(
                        'connector "%s" (%s) is now down: %s', self, self.__class__.__name__, message
                    )
            ResourceStatus(
                resource_type=resource_type, resource_pk=self.pk, status=status, message=message
            ).save()
            if status == 'up' and current_status:
                self.logger.info('connector "%s" (%s) is back up', self, self.__class__.__name__)
        elif status == 'down':
            # check last_notification_downtime and current downtime to see if it matches a new notification delay
            last_notification_timestamp = (
                current_status.last_notification_timestamp or current_status.start_timestamp
            )

            current_time = now()
            # both durations are expressed in whole minutes
            downtime = (current_time - current_status.start_timestamp).total_seconds() // 60
            last_notification_downtime = (
                last_notification_timestamp - current_status.start_timestamp
            ).total_seconds() // 60

            for delay in availability_parameters.notification_delays_generator():
                if not delay:
                    continue
                if downtime >= delay:
                    if last_notification_downtime < delay:
                        # downtime just crossed this delay: notify again
                        days = downtime // 1440
                        hours = downtime // 60
                        if days > 1:
                            human_duration = 'for %d days' % days
                        elif hours > 1:
                            human_duration = 'for %d hours' % hours
                        else:
                            human_duration = 'for %d minutes' % downtime
                        self.logger.error(
                            'connector "%s" (%s) has been down %s: %s',
                            self,
                            self.__class__.__name__,
                            human_duration,
                            message,
                            # when connector is down, logging is shutdown
                            force=True,
                        )
                        ResourceStatus.objects.filter(pk=current_status.pk).update(
                            message=message, last_notification_timestamp=current_time
                        )
                        break
                    else:
                        # already notified for this delay; just refresh the message
                        ResourceStatus.objects.filter(pk=current_status.pk).update(message=message)
                        break
            else:
                ResourceStatus.objects.filter(pk=current_status.pk).update(message=message)
|
|
|
|
    def hourly(self):
        # hook for connectors' hourly cron tasks
        pass

    def daily(self):
        # daily cron: purge expired logs
        self.clean_logs()

    def weekly(self):
        # hook for connectors' weekly cron tasks
        pass

    def monthly(self):
        # hook for connectors' monthly cron tasks
        pass

    def jobs_set(self):
        """Return the queryset of Job rows attached to this resource."""
        resource_type = ContentType.objects.get_for_model(self)
        return Job.objects.filter(resource_type=resource_type, resource_pk=self.pk)
|
|
|
|
    def jobs(self):
        """Jobs cron entry point: run this resource's pending jobs one by one.

        Jobs are claimed under SELECT FOR UPDATE (SKIP LOCKED when the
        database supports it) so concurrent workers don't run the same job;
        jobs that raise SkipJob are remembered and not retried in this pass.
        """
        # "jobs" cron job to run asynchronous tasks
        if self.down():
            # don't try running jobs if connector is known to be down.
            return
        skip_locked = {'skip_locked': True}
        if not connection.features.has_select_for_update_skip_locked:
            skip_locked = {}
        skipped_jobs = []
        while True:
            # optimistic skip
            if (
                not self.jobs_set()
                .exclude(pk__in=skipped_jobs)
                .filter(
                    Q(after_timestamp__isnull=True) | Q(after_timestamp__lt=timezone.now()),
                    status='registered',
                )
                .exists()
            ):
                break
            with transaction.atomic():
                # re-select under lock: another worker may have taken the job
                job = (
                    self.jobs_set()
                    .exclude(pk__in=skipped_jobs)
                    .filter(
                        Q(after_timestamp__isnull=True) | Q(after_timestamp__lt=timezone.now()),
                        status='registered',
                    )
                    .select_for_update(**skip_locked)
                    .order_by('pk')[:1]
                    .first()
                )
                if not job:
                    break
                job.status = 'running'
                job.save()
            # release lock
            result = job.run()
            if result == 'skipped':
                skipped_jobs.append(job.id)
|
|
|
|
    def add_job(self, method_name, natural_id=None, after_timestamp=None, **kwargs):
        """Register an asynchronous job calling self.<method_name>(**kwargs).

        *after_timestamp* optionally delays execution (datetime, timedelta
        or seconds). The job is spooled once the current transaction commits.
        Returns the saved Job.
        """
        resource_type = ContentType.objects.get_for_model(self)
        job = Job(
            resource_type=resource_type,
            resource_pk=self.pk,
            method_name=method_name,
            natural_id=natural_id,
            parameters=kwargs,
        )
        job.set_after_timestamp(after_timestamp)
        job.save()
        transaction.on_commit(lambda: job.run(spool=True))
        return job
|
|
|
|
    def handle_job_error(self, job, exc_info):
        """Mark *job* as failed, record an error summary and log the exception.

        APIError is logged as a WARNING (functional error), anything else
        as an ERROR.
        """
        from passerelle.utils.conversion import exception_to_text

        (exc_type, exc_value, dummy) = exc_info
        job.status = 'failed'
        job.done_timestamp = timezone.now()
        job.status_details = {
            'error_summary': '\n'.join(traceback.format_exception_only(exc_type, exc_value)).strip(),
        }
        # NOTE(review): exact type comparison — subclasses of APIError are
        # logged at ERROR, not WARNING; confirm this is intended
        level = logging.WARNING if exc_type == APIError else logging.ERROR
        self.logger.log(
            level,
            'error running %s job (%s)',
            job.method_name,
            exception_to_text(exc_value),
            exc_info=exc_info,
        )
|
|
|
|
    @property
    def has_open_access_right(self):
        """True when an access right is granted to a keyless (open) API user."""
        return AccessRight.objects.filter(
            resource_type=ContentType.objects.get_for_model(self), resource_pk=self.pk, apiuser__key=''
        ).exists()

    def get_setting(self, name):
        """Return one value from this connector's settings (None when absent).

        NOTE(review): get_settings() may return None when CONNECTORS_SETTINGS
        is malformed; .get() would then raise AttributeError — confirm.
        """
        connector_settings = self.get_settings()
        return connector_settings.get(name)
|
|
|
|
def get_settings(self):
|
|
connectors_settings = settings.CONNECTORS_SETTINGS
|
|
if not isinstance(connectors_settings, dict):
|
|
return None
|
|
connector_identifier = f'{self.get_connector_slug()}/{self.slug}'
|
|
connector_settings = connectors_settings.get(connector_identifier) or {}
|
|
if not isinstance(connectors_settings, dict):
|
|
return {}
|
|
return connector_settings
|
|
|
|
|
|
class AccessRight(models.Model):
    """Grant of one named permission on one resource to one API user."""

    codename = models.CharField(max_length=100, verbose_name='codename')
    # generic foreign key to the concrete connector resource
    resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    resource_pk = models.PositiveIntegerField()
    resource = fields.GenericForeignKey('resource_type', 'resource_pk')
    apiuser = models.ForeignKey(ApiUser, verbose_name=_('API User'), on_delete=models.CASCADE)

    class Meta:
        permissions = (('see_accessright', 'Can see access right'),)
        unique_together = (('codename', 'resource_type', 'resource_pk', 'apiuser'),)

    def __str__(self):
        return '%s (on %s <%s>) (for %s)' % (
            self.codename,
            self.resource_type,
            self.resource_pk,
            self.apiuser,
        )
|
|
|
|
|
|
class LoggingParameters(models.Model):
    """Per-resource logging configuration (level, trace emails, size and
    retention limits)."""

    # generic foreign key to the concrete connector resource
    resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    resource_pk = models.PositiveIntegerField()
    resource = fields.GenericForeignKey('resource_type', 'resource_pk')
    log_level = models.CharField(
        verbose_name=_('Log level'), max_length=10, choices=LOGLEVEL_CHOICES, default='INFO'
    )
    trace_emails = models.TextField(
        verbose_name=_('Emails to receive error and critical traces'),
        help_text=_('One address per line (empty for site administrators)'),
        blank=True,
    )
    requests_max_size = models.PositiveIntegerField(
        verbose_name=_('Requests maximum size'),
        help_text=_('Maximum HTTP request size to log'),
        blank=True,
        null=True,
    )
    responses_max_size = models.PositiveIntegerField(
        verbose_name=_('Responses maximum size'),
        help_text=_('Maximum HTTP reponse size to log'),
        blank=True,
        null=True,
    )
    # empty means fall back to settings.LOG_RETENTION_DAYS (see clean_logs)
    log_retention_days = models.PositiveIntegerField(
        verbose_name=_('Log retention days'),
        help_text=_('Number of days to keep logs'),
        blank=True,
        null=True,
    )

    class Meta:
        unique_together = ('resource_type', 'resource_pk')
|
|
|
|
|
|
def parse_notification_delays(value):
    """Parse a comma-separated list of minute delays into a list of ints.

    Raises ValueError unless the delays are non-negative and strictly
    increasing (which also forbids duplicates).
    """
    delays = [int(chunk.strip()) for chunk in value.split(',')]
    previous = -1
    for delay in delays:
        # each delay must be >= 0 and greater than the one before it
        if delay <= previous:
            raise ValueError
        previous = delay
    return delays
|
|
|
|
|
|
def validate_notification_delays(value):
    """Django field validator wrapping parse_notification_delays().

    Rejects empty values and any list that is not strictly increasing
    non-negative minutes.
    """
    try:
        if not value:
            raise ValueError
        parse_notification_delays(value)
    except ValueError:
        raise ValidationError(_('You must provide a list of increasing minutes delays'))
|
|
|
|
|
|
class AvailabilityParameters(models.Model):
    """Per-resource availability-check configuration."""

    # generic foreign key to the concrete connector resource
    resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    resource_pk = models.PositiveIntegerField()
    resource = fields.GenericForeignKey('resource_type', 'resource_pk')
    run_check = models.BooleanField(
        default=True,
        verbose_name=_('Run regular availability checks'),
        help_text=_('Run an availability check every 5 minutes'),
    )
    notification_delays = models.TextField(
        verbose_name=_('Notification delays'),
        default='0',
        blank=False,
        validators=[validate_notification_delays],
        help_text=_('Increasing delay between error notifications in minutes, ex.: 0,5,10'),
    )

    def has_zero_delay(self):
        # True when a downtime must be notified immediately (delay 0)
        return 0 in parse_notification_delays(self.notification_delays)

    def notification_delays_generator(self):
        """Yield the configured delays, then keep extending them forever by
        the last configured interval (so long downtimes keep notifying)."""
        notification_delays = parse_notification_delays(self.notification_delays)
        last_notification_delay = notification_delays[-1]
        if last_notification_delay > 1:
            notification_delays = itertools.chain(
                notification_delays, itertools.count(2 * last_notification_delay, last_notification_delay)
            )
        return notification_delays

    class Meta:
        unique_together = ('resource_type', 'resource_pk')
|
|
|
|
|
|
class SkipJob(Exception):
    """Raised by a job method to postpone its execution.

    The optional *after_timestamp* tells the scheduler when the job may
    be attempted again.
    """

    def __init__(self, after_timestamp=None):
        super().__init__()
        self.after_timestamp = after_timestamp
|
|
|
|
|
|
class Job(models.Model):
    """Asynchronous task attached to a resource: calls
    <resource>.<method_name>(**parameters), tracking its lifecycle."""

    # generic foreign key to the concrete connector resource
    resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    resource_pk = models.PositiveIntegerField()
    resource = fields.GenericForeignKey('resource_type', 'resource_pk')
    method_name = models.CharField(max_length=50)
    natural_id = models.CharField(max_length=256, blank=True, null=True)
    parameters = JSONField(default=dict)
    creation_timestamp = models.DateTimeField(auto_now_add=True)
    update_timestamp = models.DateTimeField(auto_now=True)
    done_timestamp = models.DateTimeField(null=True)
    # do not run before this moment (see set_after_timestamp)
    after_timestamp = models.DateTimeField(null=True)
    status = models.CharField(
        max_length=20,
        default='registered',
        choices=(
            ('registered', _('Registered')),
            ('running', _('Running')),
            ('failed', _('Failed')),
            ('restarted', _('Failed and restarted')),
            ('completed', _('Completed')),
        ),
    )
    status_details = JSONField(default=dict)

    class Meta:
        ordering = ('id',)
|
|
|
|
def set_after_timestamp(self, value):
|
|
if isinstance(value, datetime.datetime):
|
|
self.after_timestamp = value
|
|
elif isinstance(value, (float, int)):
|
|
self.after_timestamp = timezone.now() + datetime.timedelta(seconds=value)
|
|
elif isinstance(value, datetime.timedelta):
|
|
self.after_timestamp = timezone.now() + value
|
|
else:
|
|
self.after_timestamp = value
|
|
|
|
    def restart(self):
        """Clone this job as a fresh 'registered' one and mark this one
        'restarted', recording the new job's pk in status_details."""
        # clone current job
        new_job = copy.deepcopy(self)
        new_job.pk = None
        # set status
        new_job.status = 'registered'
        # reset some fields
        new_job.done_timestamp = None
        new_job.status_details = {}
        new_job.save()

        # change current job status
        self.status = 'restarted'
        self.status_details.update({'new_job_pk': new_job.pk})
        self.save()
|
|
|
|
    def run(self, spool=False):
        """Execute the job, or hand it to the uwsgi spooler when *spool* is
        set and spooling is available.

        Returns 'skipped' when the job method raised SkipJob (the job goes
        back to 'registered' with an updated after_timestamp); otherwise
        the job ends 'completed' or 'failed' (via handle_job_error).
        """
        if spool and self.pk:
            if 'uwsgi' in sys.modules and settings.PASSERELLE_MANAGE_COMMAND:
                from passerelle.utils.spooler import run_job

                # multitenant setups pass the tenant domain to the spooler
                tenant = getattr(connection, 'tenant', None)
                run_job.spool(job_id=str(self.pk), domain=getattr(tenant, 'domain_url', None))
                return

        self.status = 'running'
        self.save()
        try:
            job_info = getattr(self.resource, self.method_name)(**self.parameters)
        except SkipJob as e:
            self.status = 'registered'
            self.set_after_timestamp(e.after_timestamp)
            self.save()
            return 'skipped'
        except Exception:
            # handle_job_error saves status/details and logs the exception
            self.resource.handle_job_error(self, sys.exc_info())
        else:
            self.status = 'completed'
            self.done_timestamp = timezone.now()
            if isinstance(job_info, dict) and 'status_info' in job_info:
                self.status_details.update(job_info['status_info'])
        self.save()
|
|
|
|
|
|
class ResourceLog(models.Model):
    """One log record emitted through ProxyLogger, stored in database."""

    timestamp = models.DateTimeField(auto_now_add=True)
    # connector kind and instance slug the record belongs to
    appname = models.CharField(max_length=128, verbose_name='appname', null=True)
    slug = models.CharField(max_length=128, verbose_name='slug', null=True)
    levelno = models.IntegerField(verbose_name='log level')
    sourceip = models.GenericIPAddressField(blank=True, null=True, verbose_name=_('Source IP'))
    message = models.TextField(verbose_name='message')
    extra = JSONField(verbose_name='extras', default=dict)
    # groups records belonging to the same request/processing unit
    transaction_id = models.UUIDField(null=True, db_index=True)

    class Meta:
        ordering = ('id',)
        permissions = (('see_resourcelog', 'Can see resource logs'),)
        indexes = [
            models.Index(fields=['appname', '-timestamp']),
        ]

    @property
    def level(self):
        # slugified level name, suitable as a CSS class
        return slugify(logging.getLevelName(self.levelno))

    @property
    def level_name(self):
        """Translated level label; falls back to the raw logging name."""
        level_name = logging.getLevelName(self.levelno)
        for k, v in LOGLEVEL_CHOICES:
            if level_name == k:
                return v
        return level_name

    def __str__(self):
        return '%s %s %s %s' % (self.timestamp, self.levelno, self.appname, self.slug)
|
|
|
|
|
|
# Possible values for ResourceStatus.status
STATUS_CHOICES = (
    ('unknown', _('Unknown')),
    ('up', _('Up')),
    ('down', _('Down')),
)
|
|
|
|
|
|
class ResourceStatus(models.Model):
    """Availability state of a resource from start_timestamp onward; the
    most recent row (default ordering) is the current status."""

    # generic reference to the concrete connector resource
    resource_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    resource_pk = models.PositiveIntegerField()
    start_timestamp = models.DateTimeField(auto_now_add=True)
    status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='unknown')
    message = models.CharField(max_length=500, blank=True)
    # last time a downtime notification was emitted for this status
    last_notification_timestamp = models.DateTimeField(blank=True, null=True)

    class Meta:
        ordering = ['-start_timestamp']

    def up(self):
        return self.status == 'up'

    def down(self):
        return self.status == 'down'
|
|
|
|
|
|
class ProxyLogger:
    """Logger wrapper bound to one connector; mirrors records into the
    ResourceLog table alongside standard logging."""

    def __init__(self, connector, extra=None, transaction_id=None):
        self.connector = connector
        self.appname = connector.get_connector_slug()
        self.slug = connector.slug
        # extra key/values attached to every record emitted by this logger
        self.extra = extra or {}
        logger_name = 'passerelle.resource.%s.%s' % (self.appname, self.slug)
        self.transaction_id = transaction_id
        self._logger = logging.getLogger(logger_name)
        # honour the connector's configured per-instance log level
        self._logger.setLevel(connector.log_level)
|
|
|
|
    def context(self, **kwargs):
        """Return a copy of this logger with *kwargs* merged into its extras.

        NOTE(review): the transaction_id is not propagated to the copy —
        confirm whether that is intended.
        """
        return self.__class__(self.connector, extra=dict(self.extra or {}, **kwargs))
|
|
|
|
    @property
    def level(self):
        # effective level of the underlying stdlib logger
        return self._logger.getEffectiveLevel()
|
|
|
|
    @contextmanager
    def disable_admin_emails(self):
        """Temporarily raise Django AdminEmailHandler thresholds to CRITICAL
        so errors inside the block don't trigger admin emails; the previous
        levels are restored on exit."""
        mail_admin_handlers = [
            handler for handler in self._logger.root.handlers if isinstance(handler, AdminEmailHandler)
        ]
        for handler in mail_admin_handlers:
            # remember the level so it can be restored afterwards
            handler.previous_level = handler.level
            handler.setLevel(logging.CRITICAL)
        try:
            yield
        finally:
            for handler in mail_admin_handlers:
                handler.setLevel(handler.previous_level)
|
|
|
|
def _log(self, levelname, message, *args, **kwargs):
|
|
message = str(message)
|
|
force = kwargs.pop('force', False)
|
|
if self.connector.down() and not force:
|
|
# don't log if the connector is known to be down
|
|
return
|
|
|
|
# hide credentials in urls
|
|
for url in re.findall(r'(https?://\S+)', message):
|
|
try:
|
|
parsed = urlparse(url)
|
|
except Exception:
|
|
continue
|
|
if not parsed.username and not parsed.password:
|
|
continue
|
|
replaced = parsed._replace(netloc="{}:{}@{}".format('***', '***', parsed.hostname))
|
|
message = message.replace(url, replaced.geturl())
|
|
|
|
levelno = getattr(logging, levelname)
|
|
if self._logger.level <= levelno:
|
|
attr = {}
|
|
attr['levelno'] = levelno
|
|
|
|
rl_message = message
|
|
rl_args = args
|
|
# Borrowed from python stdlib logging/__init__.py
|
|
if (
|
|
rl_args
|
|
and len(rl_args) == 1
|
|
and isinstance(rl_args[0], collections.abc.Mapping)
|
|
and rl_args[0]
|
|
):
|
|
rl_args = rl_args[0]
|
|
# End Borrow
|
|
if rl_args:
|
|
rl_message = rl_message % rl_args
|
|
|
|
attr['message'] = rl_message
|
|
attr['appname'] = self.appname
|
|
attr['slug'] = self.slug
|
|
|
|
extra = (self.extra or {}).copy()
|
|
extra.update(kwargs.get('extra', {}))
|
|
request = extra.get('request')
|
|
|
|
for k, v in extra.items():
|
|
if isinstance(v, bytes):
|
|
try:
|
|
extra[k] = v.decode('utf-8')
|
|
except UnicodeDecodeError:
|
|
# convert bytes to string representation without enclosing b""
|
|
extra[k] = repr(v)[2:-1]
|
|
|
|
def is_json_serializable(value):
|
|
return isinstance(value, (tuple, list, dict, bool, int, float, str))
|
|
|
|
attr['extra'] = {key: value for key, value in extra.items() if is_json_serializable(value)}
|
|
|
|
if self.transaction_id:
|
|
attr['transaction_id'] = self.transaction_id
|
|
attr['extra']['transaction_id'] = self.transaction_id
|
|
|
|
if getattr(request, 'META', None):
|
|
if 'x-forwarded-for' in request.headers:
|
|
sourceip = request.headers.get('X-Forwarded-For', '').split(",")[0].strip()
|
|
else:
|
|
sourceip = request.META.get('REMOTE_ADDR')
|
|
else:
|
|
sourceip = None
|
|
attr['sourceip'] = sourceip
|
|
|
|
if kwargs.get('exc_info'):
|
|
(exc_type, exc_value, dummy) = sys.exc_info()
|
|
attr['extra']['error_summary'] = traceback.format_exception_only(exc_type, exc_value)
|
|
|
|
ResourceLog.objects.create(**attr)
|
|
|
|
admins = settings.ADMINS
|
|
logging_parameters = self.connector.logging_parameters
|
|
if logging_parameters.trace_emails:
|
|
admins = [('', x) for x in logging_parameters.trace_emails.splitlines()]
|
|
with override_settings(ADMINS=admins):
|
|
getattr(self._logger, levelname.lower())(message, *args, **kwargs)
|
|
|
|
def exception(self, message, *args, **kwargs):
|
|
kwargs['exc_info'] = 1
|
|
self._log('ERROR', message, *args, **kwargs)
|
|
|
|
def debug(self, message, *args, **kwargs):
|
|
self._log('DEBUG', message, *args, **kwargs)
|
|
|
|
def info(self, message, *args, **kwargs):
|
|
self._log('INFO', message, *args, **kwargs)
|
|
|
|
def warning(self, message, *args, **kwargs):
|
|
self._log('WARNING', message, *args, **kwargs)
|
|
|
|
def critical(self, message, *args, **kwargs):
|
|
self._log('CRITICAL', message, *args, **kwargs)
|
|
|
|
def error(self, message, *args, **kwargs):
|
|
self._log('ERROR', message, *args, **kwargs)
|
|
|
|
def fatal(self, message, *args, **kwargs):
|
|
self._log('FATAL', message, *args, **kwargs)
|
|
|
|
def log(self, level, message, *args, **kwargs):
|
|
self._log(logging.getLevelName(level), message, *args, **kwargs)
|
|
|
|
|
|
class HTTPResource(models.Model):
    """Mixin to add basic TLS/Basic HTTP authentication fields to any
    resource."""

    # HTTP Basic authentication credentials (leave both blank to disable)
    basic_auth_username = models.CharField(
        max_length=128, verbose_name=_('Basic authentication username'), blank=True
    )
    basic_auth_password = models.CharField(
        max_length=128, verbose_name=_('Basic authentication password'), blank=True
    )
    # client-side TLS certificate presented to the remote service
    client_certificate = models.FileField(verbose_name=_('TLS client certificate'), null=True, blank=True)
    # CA bundle used to verify the server certificate — presumably overrides
    # the system store when set; confirm against the HTTP layer
    trusted_certificate_authorities = models.FileField(
        verbose_name=_('TLS trusted CAs'), null=True, blank=True
    )
    verify_cert = models.BooleanField(verbose_name=_('TLS verify certificates'), default=True, blank=True)
    # proxy URL applied to both HTTP and HTTPS requests
    http_proxy = models.CharField(max_length=128, verbose_name=_('HTTP and HTTPS proxy'), blank=True)

    class Meta:
        abstract = True
|
|
|
|
|
|
class BaseQuery(models.Model):
    """Base for building custom queries.

    Concrete subclasses must define a "resource" ForeignKey to a
    BaseResource subclass, and will usually extend "as_endpoint" to
    document their own parameters.
    """

    name = models.CharField(verbose_name=_('Name'), max_length=128)
    slug = models.SlugField(verbose_name=_('Slug'), max_length=128)
    description = models.TextField(verbose_name=_('Description'), blank=True)

    http_method = 'get'

    class Meta:
        abstract = True
        unique_together = [
            ('resource', 'name'),
            ('resource', 'slug'),
        ]
        ordering = ['name']
        verbose_name = _('Query')

    def as_endpoint(self, path=''):
        """Build an endpoint descriptor exposing this query under its slug."""
        if path:
            endpoint_name = '%s/%s/' % (path, self.slug)
        else:
            endpoint_name = self.slug + '/'
        descriptor = endpoint(name=endpoint_name, description=self.name, long_description=self.description)
        descriptor.http_method = self.http_method
        descriptor.object = self.resource
        return descriptor

    def __str__(self):
        """Display the query by its human-readable name."""
        return self.name

    def export_json(self):
        """Serialize this query's own concrete fields into a dict.

        The primary key and the resource link are left out so the result
        can be re-imported on another resource.
        """
        exported = {}
        for field in self.__class__._meta.get_fields():
            if not field.concrete:
                continue
            if field.is_relation and not (
                field.one_to_one or (field.many_to_one and field.related_model)
            ):
                continue
            if field.name in ('id', 'resource'):
                continue
            exported[field.name] = getattr(self, field.name)
        return exported

    @classmethod
    def import_json(cls, d):
        """Rebuild an unsaved query instance from export_json() output."""
        return cls(**d)

    def delete_url(self):
        """Return the URL of the view removing this query."""
        url_kwargs = {'slug': self.resource.slug, 'pk': self.pk}
        return reverse(self.delete_view, kwargs=url_kwargs)

    def edit_url(self):
        """Return the URL of the view editing this query."""
        url_kwargs = {'slug': self.resource.slug, 'pk': self.pk}
        return reverse(self.edit_view, kwargs=url_kwargs)
|