This repository has been archived on 2023-02-22. You can view files and clone it, but cannot push or open issues or pull requests.
passerelle-atreal-openads/atreal_openads/models.py

1342 lines
58 KiB
Python

#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of passerelle-atreal-openads - a Publik connector to openADS
#
# Copyright (C) 2019 Atreal
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Models for this connector module.""" # pylint: disable=too-many-lines
import base64
import binascii
import datetime
import json
import os
import urlparse
from urllib import quote
import magic
from django.db import models
from django.http import Http404
from django.utils.translation import ugettext_lazy as _
from django.core.files.base import ContentFile
from django.core.exceptions import ValidationError
from django.utils.encoding import force_text
from django.utils import six
from passerelle.base.models import BaseResource, HTTPResource
from passerelle.utils.api import endpoint
from passerelle.utils.jsonresponse import APIError
from .json_schemas import (
JSON_SCHEMA_CREATE_DOSSIER_IN,
# JSON_SCHEMA_CHECK_STATUS_OUT,
# JSON_SCHEMA_CREATE_DOSSIER_OUT,
# JSON_SCHEMA_GET_DOSSIER_OUT,
# JSON_SCHEMA_GET_FWD_FILES_OUT,
# JSON_SCHEMA_GET_FWD_FILES_STATUS_OUT,
# JSON_SCHEMA_GET_COURRIER_OUT
)
from .utils import (
force_encoded_string_output,
strip_tags,
clean_spaces,
normalize,
get_file_digest,
get_upload_path,
get_file_extension,
trunc_str_values,
DictDumper,
BaseModel
)
@six.python_2_unicode_compatible
class ForwardFile(models.Model, BaseModel): # pylint: disable=too-many-instance-attributes
    """Represent a file uploaded by a user, to be forwarded to openADS.API.

    The actual content lives in 'upload_file' only while the forwarding is
    in progress: save() drops the content once 'upload_status' is 'success'
    and keeps the metadata (hash, filename, size, ...) as history.
    """
    # (codename, translated label) choices for the 'upload_status' field
    STATUSES = [
        ('pending', _('Pending')),
        ('uploading', _('Uploading')),
        ('failed', _('Failed')),
        ('success', _('Success'))
    ]
    # owning connector: deleting the connector cascades to its files
    connecteur = models.ForeignKey('AtrealOpenads',
                                   on_delete=models.CASCADE,
                                   related_name="forward_files",
                                   related_query_name="forward_file")
    # optional 'collectivite' this file belongs to
    collectivite = models.ForeignKey('Collectivite', blank=True, null=True,
                                     on_delete=models.CASCADE,
                                     related_name="forward_files",
                                     related_query_name="forward_file")
    numero_demande = models.CharField(_('Tracking code'), max_length=20)
    numero_dossier = models.CharField(_('Numero dossier'), max_length=20)
    type_fichier = models.CharField(_('Type'), max_length=10)
    # digest of the file content (computed by utils.get_file_digest)
    file_hash = models.CharField(_('Hash'), max_length=100, default='', blank=True)
    orig_filename = models.CharField(_('Filename'), max_length=100, default='', blank=True)
    content_type = models.CharField(_('Content type'), max_length=100, default='', blank=True)
    size = models.PositiveIntegerField(_('Size'), default=0)
    # content to forward; emptied by save() once the upload succeeded
    upload_file = models.FileField(_('File'), upload_to=get_upload_path, blank=True, null=True)
    upload_attempt = models.PositiveIntegerField(_('Upload attempt'), default=0, blank=True)
    upload_status = models.CharField(_('Upload status'), max_length=10, choices=STATUSES,
                                     default='pending')
    upload_msg = models.CharField(_('Upload message'), max_length=255, default='', blank=True)
    last_update_datetime = models.DateTimeField(_('Last update'), auto_now=True)
    class Meta:
        # pylint: disable=too-few-public-methods,no-init,old-style-class,missing-docstring
        verbose_name = _('Forward File')
        indexes = [
            models.Index(fields=['connecteur'], name='ff_connecteur_idx'),
            models.Index(fields=['collectivite'], name='ff_collectivite_idx'),
            models.Index(fields=['numero_demande', 'numero_dossier'], name='ff_deman_doss_idx'),
            models.Index(fields=['numero_demande'], name='ff_demande_idx'),
            models.Index(fields=['numero_dossier'], name='ff_dossier_idx'),
            models.Index(fields=['orig_filename'], name='ff_filename_idx'),
            models.Index(fields=['upload_status'], name='ff_status_idx'),
            models.Index(fields=['last_update_datetime'], name='ff_last_up_dt_idx')
        ]
        ordering = ['-last_update_datetime']
    def get_status(self, status_codename=None):
        """Return the upload status human name translated.
        If specified codename is not found, return it.
        """
        # default to this instance's own status
        if not status_codename:
            status_codename = self.upload_status
        for status in self.STATUSES:
            if status[0] == status_codename:
                return status[1]
        # unknown codename: echo it back unchanged
        return status_codename
    @force_encoded_string_output
    def __repr__(self):
        """Return a developer-oriented summary of this instance."""
        return (u'ForwardFile(id=%s,connecteur=%s,collectivite=%s'
                ',demande=%s,dossier=%s,type=%s,filename=%s,status=%s)') % (
                    self.id, # pylint: disable=no-member
                    force_text(self.connecteur) if hasattr(self, 'connecteur') else None,
                    force_text(self.collectivite) if hasattr(self, 'collectivite') else None,
                    self.numero_demande, self.numero_dossier,
                    self.type_fichier, self.orig_filename, self.upload_status)
    def __str__(self):
        """Return '<truncated filename>[<translated status>]'."""
        return u"%s[%s]" % (trunc_str_values(self.orig_filename, 20), self.get_status())
    def get_url_params(self, primary_key=True):
        """Return URL kwargs, adding the connector slug to the base ones."""
        params = super(ForwardFile, self).get_url_params(primary_key=primary_key)
        params['connecteur'] = self.connecteur.slug if self.connecteur else None
        return params
    def update_content_type(self, only_if_empty=False):
        """Update the content type from the content of the file."""
        if not self.content_type or not only_if_empty:
            if self.upload_file and self.upload_file.size: # pylint: disable=no-member
                # guess the MIME type from the first KiB of the content
                # NOTE(review): read() advances the file position; assumes
                # later readers seek back to the start -- TODO confirm
                # pylint: disable=no-member
                self.content_type = magic.from_buffer(self.upload_file.read(1024), mime=True)
            else:
                self.content_type = ''
    def update_file_hash(self, only_if_empty=False):
        """Update the file_hash field from the content of the file."""
        if not self.file_hash or not only_if_empty:
            if self.upload_file and self.upload_file.size: # pylint: disable=no-member
                self.file_hash = get_file_digest(self.upload_file)
            else:
                self.file_hash = ''
    # preprocessing data and validate model before saving
    # /!\ Attention:
    #     this will not be triggered when doing bulk actions like with QuerySet.update()
    #     @see: https://docs.djangoproject.com/en/2.2/topics/db/models/
    #     The note entitled "Overridden model methods are not called on bulk operations"
    def save(self, *args, **kwargs): # pylint: disable=arguments-differ
        """Save the entity, overwritten to preprocessing data and validate model."""
        # delete file content (on success)
        if self.upload_status == 'success':
            # pylint: disable=no-member
            if self.upload_file and self.upload_file.size > 0:
                # NOTE(review): FieldFile.delete() defaults to save=True and
                # therefore re-saves the model once -- confirm intended
                # pylint: disable=no-member
                self.upload_file.delete()
        # else, update metadata
        else:
            self.size = self.upload_file.size if self.upload_file else 0 # noqa: E501, pylint: disable=no-member
            self.update_file_hash()
            self.update_content_type(only_if_empty=True)
        # validation (calling self.clean())
        self.full_clean()
        super(ForwardFile, self).save(*args, **kwargs)
    # check that one the following fields must not be blank/null:
    #     'file_hash', 'orig_filename', 'upload_file'
    # because if they are all empty we dont have any usefull information about the upload
    def clean(self, *args, **kwargs): # pylint: disable=arguments-differ
        """Check all the fields, overwritten to check grouped blank/null fields."""
        ret = super(ForwardFile, self).clean(*args, **kwargs)
        if (not self.file_hash
                and not self.orig_filename
                # pylint: disable=no-member
                and (not self.upload_file or not self.upload_file.size)):
            raise ValidationError(
                _("A %(object)s cannot have all the following fields empty: %(fields)s." % {
                    'object': self.get_verbose_name(),
                    'fields': ['file_hash', 'orig_filename', 'upload_file']})
            )
        return ret
@six.python_2_unicode_compatible
class Collectivite(models.Model, BaseModel):
    """Represent a "collectivite".

    Belongs to one connector and is identified on the openADS side by
    'openADS_id' (unique per connector).
    """
    name = models.CharField(_('Name'), max_length=150, default='', blank=True)
    connecteur = models.ForeignKey('AtrealOpenads',
                                   on_delete=models.CASCADE,
                                   related_name="collectivites",
                                   related_query_name="collectivite")
    openADS_id = models.PositiveIntegerField(_('openADS identifier'), help_text=_('ex: 3'))
    # 'guichet' will be a property provided by the one-to-one relation of Guichet
    # 'forward_files' will be a property provided by the related_name of the foreignKey
    class Meta:
        # pylint: disable=too-few-public-methods,no-init,old-style-class,missing-docstring
        verbose_name = _('Collectivite')
        unique_together = ['connecteur', 'openADS_id']
        indexes = [
            models.Index(fields=['connecteur', 'openADS_id'], name='col_conn_openADSid_idx'),
            models.Index(fields=['connecteur'], name='col_connecteur_idx'),
            models.Index(fields=['openADS_id'], name='col_openADS_id_idx')
        ]
        ordering = ['name']
    @classmethod
    def get_fields(cls):
        """Return the model fields, related ones moved to the end of the list."""
        # get_fields() return is immutable, hence the copy
        fields = [f for f in super(Collectivite, cls).get_fields()]
        # moving related fields field at the end of the list
        if fields:
            rels = []
            for rel_name in ['forward_file', 'guichet']:
                if fields[0] and hasattr(fields[0], 'name') and fields[0].name == rel_name:
                    rels.append(fields.pop(0))
            # re-append in the original relative order
            for rel in reversed(rels):
                fields.append(rel)
        return fields
    @force_encoded_string_output
    def __repr__(self):
        """Return a developer-oriented summary of this instance."""
        return u'Collectivite(id=%s,name=%s,connecteur=%s,openADS_id=%s,guichet=%s)' % (
            self.id, force_text(self.name), # pylint: disable=no-member
            force_text(self.connecteur) if hasattr(self, 'connecteur') else None,
            self.openADS_id,
            # pylint: disable=no-member
            force_text(self.guichet) if hasattr(self, 'guichet') else None)
    def __str__(self):
        """Return the 'collectivite' name as text."""
        return self.name if isinstance(self.name, six.text_type) else force_text(self.name)
    def get_fields_kv(self):
        """Return (field, value) pairs, related fields moved to the end."""
        fields = super(Collectivite, self).get_fields_kv()
        # moving related fields field at the end of the list
        if fields:
            rels = []
            for rel_name in ['forward_file', 'guichet']:
                if (fields[0] and fields[0][0]
                        and hasattr(fields[0][0], 'name') and fields[0][0].name == rel_name):
                    rels.append(fields.pop(0))
            # re-append in the original relative order
            for rel in reversed(rels):
                fields.append(rel)
        return fields
    def get_url_params(self, primary_key=True):
        """Return URL kwargs, adding the connector slug to the base ones."""
        params = super(Collectivite, self).get_url_params(primary_key=primary_key)
        # pylint: disable=no-member
        params['connecteur'] = self.connecteur.slug if self.connecteur else None
        return params
@six.python_2_unicode_compatible
class Guichet(models.Model, BaseModel):
    """Represent a "Guichet".

    Weekly opening-hours window for a 'collectivite': open from day
    'ouverture_sem_d' at 'ouverture_sem_h' until day 'fermeture_sem_d' at
    'fermeture_sem_h', and on each day only between 'ouverture_jour_h' and
    'fermeture_jour_h'.
    """
    # ISO weekday numbers (1 = Monday ... 7 = Sunday) with translated labels
    DAYS = [
        (1, _('Monday')),
        (2, _('Tuesday')),
        (3, _('Wednesday')),
        (4, _('Thursday')),
        (5, _('Friday')),
        (6, _('Saturday')),
        (7, _('Sunday'))
    ]
    collectivite = models.OneToOneField('Collectivite',
                                        on_delete=models.CASCADE,
                                        related_name="guichet")
    ouverture_jour_h = models.TimeField(_('Hour of opening (each day)'), help_text=_('ex: 08:30'))
    fermeture_jour_h = models.TimeField(_('Hour of closing (each day)'), help_text=_('ex: 17:00'))
    ouverture_sem_d = models.PositiveIntegerField(_('Day of opening (each week)'),
                                                  help_text=_('ex: Lundi'),
                                                  choices=DAYS, default=1)
    fermeture_sem_d = models.PositiveIntegerField(_('Day of closing (each week)'),
                                                  help_text=_('ex: Samedi'),
                                                  choices=DAYS, default=6)
    ouverture_sem_h = models.TimeField(_('Hour of opening (on opening day)'),
                                       help_text=_('ex: 08:30'))
    fermeture_sem_h = models.TimeField(_('Hour of closing (on closing day)'),
                                       help_text=_('ex: 12:15'))
    class Meta:
        # pylint: disable=too-few-public-methods,no-init,old-style-class,missing-docstring
        verbose_name = _('Guichet')
        verbose_name_plural = _('Guichets')
        indexes = [
            models.Index(fields=['collectivite'], name='su_collectivite_idx')
        ]
        ordering = ['collectivite']
    @force_encoded_string_output
    def __repr__(self):
        """Return a developer-oriented summary of this instance."""
        return u'Guichet(id=%s,collectivite=%s,%s)' % (
            self.id, # pylint: disable=no-member
            force_text(self.collectivite) if hasattr(self, 'collectivite') else None,
            force_text(self))
    def __str__(self):
        """Return '<day> <hh:mm> -> <day> <hh:mm> [<hh:mm>/<hh:mm>]'."""
        return u'%s %s -> %s %s [%s/%s]' % (
            force_text(self.DAYS[self.ouverture_sem_d - 1][1]),
            # pylint: disable=no-member
            self.ouverture_sem_h.strftime('%H:%M') if self.ouverture_sem_h else None,
            force_text(self.DAYS[self.fermeture_sem_d - 1][1]),
            # pylint: disable=no-member
            self.fermeture_sem_h.strftime('%H:%M') if self.fermeture_sem_h else None,
            # pylint: disable=no-member
            self.ouverture_jour_h.strftime('%H:%M') if self.ouverture_jour_h else None,
            # pylint: disable=no-member
            self.fermeture_jour_h.strftime('%H:%M') if self.fermeture_jour_h else None)
    def get_url_params(self, primary_key=True):
        """Return URL kwargs: 'collectivite' id and connector slug added."""
        params = super(Guichet, self).get_url_params(primary_key=primary_key)
        # pylint: disable=no-member
        params['collectivite'] = self.collectivite.id if self.collectivite else None
        # pylint: disable=no-member
        params['connecteur'] = self.collectivite.connecteur.slug if self.collectivite else None
        return params
    def get_list_url(self):
        """A 'Guichet' has no list page: fail loudly if this is reached."""
        raise Exception(u"Guichet:get_list_url() method should not be called")
    # @raise TypeError if argument is not a datetime object
    def is_open(self, date_t):
        """ Return 'True' if the "Guichet" is open, else False."""
        if date_t:
            if not isinstance(date_t, datetime.datetime):
                raise TypeError(u"is_open() expect a datetime object (not a %s)" % type(date_t))
            # daily opening/closing instants on the same date as 'date_t'
            ouverture_jour_date_t = datetime.datetime.combine(date_t, self.ouverture_jour_h)
            fermeture_jour_date_t = datetime.datetime.combine(date_t, self.fermeture_jour_h)
            # ISO weekday: 1 = Monday ... 7 = Sunday (matches DAYS)
            day = date_t.isoweekday()
            # NOTE(review): all comparisons below are strict, so the exact
            # opening/closing instant counts as closed -- confirm intended.
            # NOTE(review): assumes ouverture_sem_d < fermeture_sem_d; a
            # weekly window wrapping past Sunday is not handled.
            return (
                # opening day
                (day == self.ouverture_sem_d
                 and date_t.time() > self.ouverture_sem_h and date_t < fermeture_jour_date_t)
                # closing day
                or (day == self.fermeture_sem_d
                    and date_t.time() < self.fermeture_sem_h and date_t > ouverture_jour_date_t)
                # regular days
                or (day > self.ouverture_sem_d
                    and day < self.fermeture_sem_d
                    and date_t > ouverture_jour_date_t
                    and date_t < fermeture_jour_date_t)
            )
        # falsy argument (e.g. None): closed
        return False
@six.python_2_unicode_compatible
class AtrealOpenads(BaseResource, HTTPResource, BaseModel):
"""API that proxy/relay communications with/to openADS."""
# openADS 'collectivite' identifier used when the endpoint caller passes none
default_collectivite_openADS_id = models.PositiveIntegerField(
    _("Default 'collectivite' (identifier in openADS)"),
    help_text=_('ex: 3'), default=0, blank=True)
# base URL all openADS.API endpoint paths are joined to
openADS_API_url = models.URLField(
    _('openADS API URL'), max_length=255,
    help_text=_('ex: https://openads.your_domain.net/api/'), default='')
# plain class attribute (not a model field): timeout passed to the POST
# requests towards openADS.API -- presumably seconds, per requests' API
openADS_API_timeout = 3600
# 'collectivites' will be a property provided by the related_name of the foreignKey
# 'forward_files' will be a property provided by the related_name of the foreignKey
api_description = _('''This API provides exchanges with openADS.''')
category = _('Business Process Connectors')
class Meta:
    # pylint: disable=too-few-public-methods,no-init,old-style-class,missing-docstring
    # singular and plural deliberately share the same display name
    verbose_name = _('openADS')
    verbose_name_plural = _('openADS')
    ordering = ['openADS_API_url']
@classmethod
def get_class_name_plural(cls):
    """Return the class display name (plural form is the same as singular)."""
    return cls.get_class_name()
@force_encoded_string_output
def __repr__(self):
    """Return a developer-oriented summary of this connector."""
    return u'AtrealOpenads(id=%s,openADS=%s,login=%s,collectivites=%s,default=%s)' % (
        self.id, # pylint: disable=no-member
        force_text(self.openADS_API_url),
        force_text(self.basic_auth_username),
        self.collectivites.count(), # pylint: disable=no-member
        self.default_collectivite_openADS_id)
def __str__(self):
    """Return the connector slug as a text string."""
    slug = self.slug
    if isinstance(slug, six.string_types):
        return slug
    return force_text(slug)
def get_url_name(self, prefix='', plural=False):
    """Return the URL pattern name: '<prefix>-connector', or plain 'connector'."""
    if prefix:
        return '%s-connector' % prefix
    return 'connector'
def get_url_params(self, primary_key=True):
    """Return the URL kwargs identifying this connector instance.

    The fixed connector name is always present; the instance slug is added
    unless 'primary_key' is False.
    """
    if not primary_key:
        return {'connector': 'atreal-openads'}
    return {'connector': 'atreal-openads', 'slug': self.slug}
def get_list_url(self):
    """Connector instances have no list page: fail loudly if this is reached."""
    raise Exception(u"AtrealOpenads:get_list_url() method should not be called")
def get_collectivite(self, openads_id):
    """Return the 'collectivite' matching an openADS id.

    Raises Collectivite.DoesNotExist when this connector has no
    'collectivite' with that openADS identifier.
    """
    # pylint: disable=no-member
    return Collectivite.objects.get(connecteur=self, openADS_id=openads_id)
def log_json_payload(self, payload, title='payload', max_str_len=100):
    """Log a json paylod surrounded by dashes and with file content filtered.

    DictDumper is given 'max_str_len' -- presumably to truncate long string
    values such as base64 file contents (see utils.DictDumper).
    """
    self.logger.debug(u"----- %s (begining) -----", title)
    self.logger.debug(u"%s", DictDumper(payload, max_str_len))
    self.logger.debug(u"----- %s (end) -----", title)
def get_files_from_payload(self, payload, title='payload'):
    """Return the 'files' list of a JSON payload, with checks and logging.

    The payload is always logged (file contents filtered); an APIError is
    raised when the 'files' entry is missing, not a list, or empty.
    """
    error = None
    if 'files' not in payload:
        error = u"Expecting '%s' key in JSON %s" % ('files', title)
    else:
        files = payload['files']
        if not isinstance(files, list):
            error = (u"Expecting '%s' value in JSON %s to be a %s (not a %s)" %
                     ('files', title, 'list', type(files)))
        elif not files:
            error = u"Expecting non-empty '%s' value in JSON %s" % ('files', title)
    # valid or not, the payload is logged before returning/raising
    self.log_json_payload(payload, title)
    if error is not None:
        raise APIError(error)
    return files
def check_file_dict(self, dict_file, title='payload', b64=True): # pylint: disable=no-self-use
    """Ensure a file dict has all its required items.

    Raises APIError when the content key is missing, or when the content or
    the optional 'filename' value is not a string.
    """
    # the content lives under 'b64_content' when base64-encoded
    content_key = 'b64_content' if b64 else 'content'
    if content_key not in dict_file:
        raise APIError(u"Expecting 'file.%s' key in JSON %s" % (content_key, title))
    file_content = dict_file[content_key]
    if not isinstance(file_content, (six.string_types, six.binary_type)):
        raise APIError(
            u"Expecting '%s' value in JSON %s in file dict to be a %s (not a %s)" %
            ('file.%s' % content_key, title, 'string', type(file_content)))
    # 'filename' is optional but must be a string when present
    if 'filename' in dict_file and not isinstance(dict_file['filename'], six.string_types):
        raise APIError(
            u"Expecting '%s' value in JSON %s in file dict to be a %s (not a %s)" %
            ('file.filename', title, 'string', type(dict_file['filename'])))
def get_first_file_from_payload(self,
                                payload,
                                title='payload',
                                ensure_content=True,
                                b64=True):
    """Return the first file dict of a JSON payload, after checks/logging."""
    # fetch (and log) every file of the payload, then keep the first one
    first_file = self.get_files_from_payload(payload, title)[0]
    # optionally validate its structure (content presence and types)
    if ensure_content:
        self.check_file_dict(first_file, title=title, b64=b64)
    return first_file
@endpoint(
    description=_("Test an openADS 'connexion'")
) # pylint: disable=keyword-arg-before-vararg
# pylint: disable=unused-argument,arguments-differ
def check_status(self, request=None, *args, **kwargs):
    """Check avaibility of the openADS.API service.

    GETs the '__api__' endpoint; any HTTP error propagates through
    raise_for_status(). Returns the HTTP status code on success.
    """
    url = urlparse.urljoin(self.openADS_API_url, '__api__')
    response = self.requests.get(url)
    response.raise_for_status()
    return {'response': response.status_code}
@endpoint(
    perm='can_access',
    methods=['post'],
    pattern='^(?P<type_dossier>\w+)/?$', # noqa: W605,E501, pylint: disable=anomalous-backslash-in-string
    example_pattern='{type_dossier}/',
    parameters={
        'type_dossier': {'description': _("Type of 'dossier'"), 'example_value': 'DIA'},
        'type_dossier_detaille': {'description': _("Detailled type of 'dossier'"),
                                  'example_value': 'CUb'},
        'collectivite': {
            'description': _("Use this collectivite (instead of the default one)"),
            'example_value': '3'
        },
        'now': {'description': _(("Datetime (or string formatted to: '%s') "
                                  "against which the 'guichet' is checked for opening") % (
                                      '%Y-%m-%d %H:%M:%S')), 'example_value': 'DIA'},
    },
    post={'description': _("Create an openADS 'dossier'"),
          'request_body': {
              'schema': {
                  'application/json': JSON_SCHEMA_CREATE_DOSSIER_IN
              } # pylint: disable=too-many-statements,too-many-branches,too-many-locals,too-many-arguments
          }
    }
) # pylint: disable=keyword-arg-before-vararg
# pylint: disable=unused-argument
def create_dossier(self, request, type_dossier, type_dossier_detaille=None, collectivite=None,
                   now=None, *args, **kwargs):
    """Create an openADS 'dossier'.

    Builds the openADS payload from the form fields in the request body,
    POSTs it to openADS.API, then stores the user files as ForwardFile
    rows and schedules an 'upload_user_files' job to forward them.
    Returns the 'numero_dossier' and the 'recepisse' file on success.
    """
    # loads the request body as JSON content
    json_data = json.loads(request.body)
    # log the request body (filtering the files content)
    self.log_json_payload(json_data, 'request')
    # get the collectivite ID or use the connecteur's default one
    collectivite_id = collectivite if collectivite else self.default_collectivite_openADS_id
    # get the collectivite instance
    try:
        collectivite = self.get_collectivite(collectivite_id)
    # no collectivite instance matching that ID
    except Collectivite.DoesNotExist: # pylint: disable=no-member
        pass
    # a collectivite instance was found
    else:
        # the collectivite has a guichet
        if hasattr(collectivite, 'guichet') and collectivite.guichet:
            # get the datetime against which the 'guichet' is checked for opening
            now_fmt = '%Y-%m-%d %H:%M:%S'
            if not now:
                now = datetime.datetime.now()
            elif isinstance(now, six.string_types):
                now = datetime.datetime.strptime(now, now_fmt)
            elif not isinstance(now, datetime.datetime):
                raise APIError(
                    u"Invalid value of type '%s' for now argument of endpoint '%s' "
                    "(must be: %s)" % (
                        type(now),
                        'create_dossier',
                        "datetime or string formatted to '%s'" % now_fmt))
            # if the guichet is not open, bail out without creating anything
            if not collectivite.guichet.is_open(now):
                return {'message': _(u"Guichet closed for collectivite '%s'" % collectivite)}
    # build the payload
    payload = {
        "collectivite": int(collectivite_id),
        #"type_detaille": type_dossier
    }
    if type_dossier_detaille:
        payload["type_detaille"] = type_dossier_detaille
    # 'terrain' (parcel) address and cadastral references
    payload["terrain"] = {
        "numero_voie": normalize(json_data['fields']['terrain_numero_voie']),
        "nom_voie": normalize(json_data['fields']['terrain_nom_voie']),
        "code_postal": normalize(json_data['fields']['terrain_code_postal']),
        "localite": normalize(json_data['fields']['terrain_localite']),
        "references_cadastrales": []
    }
    if 'terrain_lieu_dit' in json_data['fields'] and json_data['fields']['terrain_lieu_dit']:
        payload["terrain"]["lieu_dit"] = normalize(json_data['fields']['terrain_lieu_dit'])
    # each reference is a (prefixe, section, numero) triple
    for ref in json_data['fields']['reference_cadastrale']:
        payload["terrain"]["references_cadastrales"].append({
            "prefixe": normalize(ref[0]),
            "section": normalize(ref[1]),
            "numero": normalize(ref[2])
        })
    # extra parcels, when the form declared some
    if json_data['fields']['autres_parcelles']:
        for ref in json_data['fields']['references_cadastrales']:
            payload["terrain"]["references_cadastrales"].append({
                "prefixe": normalize(ref[0]),
                "section": normalize(ref[1]),
                "numero": normalize(ref[2])
            })
    # setup demandeur variable prefix
    prefixes = {"demandeurs": ''}
    # a 'mandataire' block exists when the requester is not the owner
    if ('proprietaire' in json_data['fields'] and
            normalize(json_data['fields']['proprietaire']) != 'Oui'):
        prefixes["mandataires"] = 'mandataire_'
    # for each type of demandeur with associated prefix
    for key, prefix in prefixes.items():
        # "qualite" of the demandeur
        qualite = normalize(json_data['fields']['%squalite' % prefix])
        # 'type_personne' of the demandeur
        type_personne = 'particulier' if qualite == 'Un particulier' else 'personne_morale'
        # get the demandeur informations
        demandeur = {
            "type_personne": type_personne,
            "typologie": 'petitionnaire' if key == 'demandeurs' else 'delegataire',
            "nom": normalize(json_data['fields']['%snom' % prefix]),
            "prenom": normalize(json_data['fields']['%sprenom' % prefix]),
            "adresse": {
                "numero_voie": normalize(json_data['fields']['%snumero_voie' % prefix]),
                "nom_voie": normalize(json_data['fields']['%snom_voie' % prefix]),
                "code_postal": normalize(json_data['fields']['%scode_postal' % prefix]),
                "localite": normalize(json_data['fields']['%slocalite' % prefix])
            },
            "coordonnees": {
                "email": normalize(json_data['fields']['%semail' % prefix])
            }
        }
        # add fields if the demandeur is not an individual
        if qualite != 'Un particulier':
            demandeur["raison_sociale"] = normalize(
                json_data['fields']['%sraison_sociale' % prefix])
            demandeur["denomination"] = normalize(
                json_data['fields']['%sdenomination' % prefix])
            self.logger.debug("%s %s => '%s', '%s'",
                              demandeur['prenom'],
                              demandeur['nom'],
                              demandeur['raison_sociale'],
                              demandeur['denomination'])
        # add optional lieu_dit field
        if ('%slieu_dit' % prefix in json_data['fields']
                and json_data['fields']['%slieu_dit' % prefix]):
            demandeur["adresse"]["lieu_dit"] = normalize(
                json_data['fields']['%slieu_dit' % prefix])
        # add it to the payload
        payload[key] = [demandeur]
        self.logger.debug(u"Added '%s' to payload: %s %s",
                          key,
                          demandeur['prenom'],
                          demandeur['nom'])
    # log the payload
    self.log_json_payload(payload)
    # every field key that might contain a file content
    file_keys = ['cerfa'] + ['annexe_%s' % i for i in range(1, 5)]
    # prepare files that will be forwarded
    files = []
    for k in file_keys:
        if (k in json_data['fields']
                and json_data['fields'][k]
                and isinstance(json_data['fields'][k], dict)
                and 'content' in json_data['fields'][k]):
            # get the content decoded from base 64
            content = base64.b64decode(json_data['fields'][k]['content'])
            # guess the mime type based on the begining of the content
            content_type = magic.from_buffer(content, mime=True)
            # set it as an upload
            upload_file = ContentFile(content)
            # get the file hash
            file_hash = get_file_digest(upload_file)
            # get the content type if specified (overrides the guess above)
            if 'content_type' in json_data['fields'][k]:
                content_type = json_data['fields'][k]['content_type']
            # check the content type is PDF for file of type CERFA
            if k == 'cerfa' and content_type != 'application/pdf':
                self.logger.warning("CERFA content type is '%s' instead of '%s'",
                                    content_type,
                                    'application/pdf')
            # get the filename if specified
            filename = None
            if 'filename' in json_data['fields'][k]:
                filename = json_data['fields'][k]['filename']
            # define the file extension
            file_extension = get_file_extension(filename, content_type)
            # filename not specified
            if not filename:
                # build a filename (less than 50 chars) from the hash's tail
                filename = file_hash[40:] + file_extension
            # update the specified filename with an extension, if none
            elif '.' not in filename:
                filename += file_extension
            # get the type fichier (less than 10 chars)
            type_fichier = None
            if k + '_type_raw' in json_data['fields']:
                type_fichier = json_data['fields'][k + '_type_raw']
                if len(type_fichier) > 10:
                    raise APIError(u"Type '%s' for file '%s' is too long "
                                   "(%d chars, but max is %d)" % (
                                       type_fichier, k, len(type_fichier), 10))
            elif k.lower() == 'cerfa':
                type_fichier = 'CERFA'
            else:
                raise APIError(u"No type field/value for file '%s'" % k)
            # append the file to the list
            files.append({
                'type_fichier': type_fichier,
                'orig_filename': filename,
                'content_type': content_type,
                'file_hash': file_hash,
                'upload_file': upload_file
            })
    # log files to be forwarded
    self.logger.debug("----- files (begining) -----")
    self.logger.debug('%s', files) if files else self.logger.debug("(no files)") # pylint: disable=expression-not-assigned
    self.logger.debug("----- files (end) -----")
    # make a request to openADS.API (with the payload)
    url = urlparse.urljoin(self.openADS_API_url, 'dossiers/%s' % quote(type_dossier))
    response = self.requests.post(
        url,
        json=payload,
        timeout=self.openADS_API_timeout
    )
    # response is an error code
    if response.status_code // 100 != 2:
        error = self.get_response_error(response)
        self.logger.warning(u"Request [POST] '%s' failed with error: '%s'", url, error)
        raise APIError(error)
    # load the response JSON content
    try:
        result = response.json()
    except ValueError:
        raise APIError(u'No JSON content returned: %r' % response.content[:1000])
    # get the recepisse
    recepisse = self.get_first_file_from_payload(result, title='response')
    # ensure recepisse content type is PDF
    if ('content_type' in recepisse
            and recepisse['content_type']
            and recepisse['content_type'] != 'application/pdf'):
        self.logger.debug(
            u"Forcing 'recepisse' content type to '%s' instead of '%s'.",
            'application/pdf',
            recepisse['content_type']
        )
        recepisse['content_type'] = 'application/pdf'
    # decode the recepisse from base 64 (only to validate it; result unused)
    try:
        base64.b64decode(recepisse['b64_content'])
    except (TypeError, binascii.Error):
        raise APIError('Failed to decode recepisse content from base 64')
    self.logger.debug("Successfully decoded recepisse from base 64")
    # check/get the 'numero_dossier'
    if 'numero_dossier' not in result:
        raise APIError("Expecting 'numero_dossier' key in JSON response")
    numero_dossier = result.get('numero_dossier')
    if not isinstance(numero_dossier, six.string_types):
        raise APIError(
            u"Expecting '%s' value in JSON response to be a %s (not a %s)" %
            ('numero_dossier', 'string', type(numero_dossier)))
    numero_dossier = normalize(numero_dossier)
    self.logger.debug(u"Numero dossier: %s", numero_dossier)
    # save files to be forwarded to openADS.API
    if files:
        file_ids = []
        for upfile in files:
            # tracking code: urlsafe base64 of 6 random bytes (8 chars)
            rand_id = base64.urlsafe_b64encode(os.urandom(6))
            forwardfile = ForwardFile()
            forwardfile.connecteur = self
            if isinstance(collectivite, Collectivite):
                forwardfile.collectivite = collectivite
            forwardfile.numero_demande = rand_id
            forwardfile.numero_dossier = numero_dossier
            for k in ['type_fichier', 'orig_filename', 'content_type', 'file_hash']:
                setattr(forwardfile, k, upfile[k])
            # pylint: disable=no-member
            forwardfile.upload_file.save(forwardfile.orig_filename, upfile['upload_file'])
            forwardfile.upload_status = 'pending'
            forwardfile.save()
            self.logger.debug(
                u"Created ForwardFile '%s' for file '%s' (%s)",
                forwardfile.id, # pylint: disable=no-member
                forwardfile.orig_filename,
                forwardfile.upload_file.path # pylint: disable=no-member
            )
            file_ids.append(forwardfile.id) # pylint: disable=no-member
        # schedule the asynchronous forwarding of the saved files
        job = self.add_job('upload_user_files',
                           natural_id=numero_dossier,
                           request=None,
                           type_dossier=type_dossier,
                           numero_dossier=numero_dossier,
                           file_ids=file_ids)
        self.logger.debug(u"Added a job '%s' for dossier '%s' (%s) with file ids '%s'",
                          job.id, # pylint: disable=no-member
                          numero_dossier,
                          type_dossier,
                          file_ids)
    # respond with the 'numero_dossier' and the recepisse file
    return {
        'numero_dossier': numero_dossier,
        'recepisse': recepisse
    }
@endpoint(
    perm='can_access',
    description=_("Get informations about an openADS 'dossier'"),
    # pylint: disable=anomalous-backslash-in-string
    pattern='^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/?$', # noqa: W605
    example_pattern='{type_dossier}/{numero_dossier}',
    parameters={
        'type_dossier': {'description': _("Type of 'dossier'"), 'example_value': 'DIA'},
        'numero_dossier': {'description': _("Identifier for 'dossier'"),
                           'example_value': 'DIA0130551900001'}
    }
) # pylint: disable=keyword-arg-before-vararg
# pylint: disable=unused-argument
def get_dossier(self, request, type_dossier, numero_dossier, *args, **kwargs):
    """Get informations about an openADS 'dossier'.

    Proxies a GET to the openADS.API 'dossier/<type>/<numero>' endpoint
    and returns its JSON response unchanged.

    Raises APIError when openADS replies with a non-2xx status or with a
    body that is not valid JSON.
    """
    # make a request to openADS.API
    url = urlparse.urljoin(self.openADS_API_url, 'dossier/%s/%s' % (
        quote(type_dossier), quote(numero_dossier)))
    response = self.requests.get(url)
    # response is an error
    if response.status_code // 100 != 2:
        error = self.get_response_error(response)
        self.logger.warning(u"Request [GET] '%s' failed with error: '%s'", url, error)
        raise APIError(error)
    # load the response as JSON
    try:
        result = response.json()
    except ValueError:
        raise APIError(u'No JSON content returned: %r' % response.content[:1000])
    # log the response
    self.log_json_payload(result, 'response')
    # return the already-parsed JSON (the body was previously parsed twice
    # because of a redundant final 'return response.json()')
    return result
@endpoint(
    perm='can_access',
    description=_("Get informations about the forwarding of user files to openADS"),
    pattern='^(?P<numero_dossier>\w+)/?$', # noqa: W605,E501, pylint: disable=anomalous-backslash-in-string
    example_pattern='{numero_dossier}/',
    parameters={
        'numero_dossier': {'description': _("Identifier for 'dossier'"),
                           'example_value': 'DIA0130551900001'},
        'fichier_id': {'description': _("File identifier"),
                       'example_value': '78'}
    }
) # pylint: disable=keyword-arg-before-vararg
# pylint: disable=unused-argument,no-self-use
def get_fwd_files(self, request, numero_dossier, fichier_id=None, *args, **kwargs):
    """Get informations about the forwarding of user files to openADS.

    Returns a list of dicts (one per matching ForwardFile) describing the
    file and its upload state. Raises APIError when 'fichier_id' is not an
    integer, and Http404 when no file matches both criteria.
    """
    payload = []
    fwd_files = []
    # search for all files matching the 'numero_dossier' number
    if not fichier_id:
        # pylint: disable=no-member
        fwd_files = ForwardFile.objects.filter(numero_dossier=numero_dossier)
    # search for a single file
    else:
        try:
            fichier_id = int(fichier_id)
        except ValueError:
            raise APIError('fichier_id must be an integer')
        try:
            # bugfix: also match on 'numero_dossier' -- the Http404 message
            # below always claimed both criteria, but only 'id' was checked
            # pylint: disable=no-member
            fwd_files = [ForwardFile.objects.get(id=fichier_id,
                                                 numero_dossier=numero_dossier)]
        except ForwardFile.DoesNotExist: # pylint: disable=no-member
            raise Http404(u"No file matches 'numero_dossier=%s' and 'id=%s'." % (
                numero_dossier, fichier_id))
    # append each file to the response payload
    for fwd_file in fwd_files:
        payload.append({
            'id': fwd_file.id,
            'numero_demande': fwd_file.numero_demande,
            'numero_dossier': fwd_file.numero_dossier,
            'type_fichier': fwd_file.type_fichier,
            'file_hash': fwd_file.file_hash,
            'orig_filename': fwd_file.orig_filename,
            'content_type': fwd_file.content_type,
            'upload_status': fwd_file.upload_status,
            'upload_attempt': fwd_file.upload_attempt,
            'upload_msg': fwd_file.upload_msg,
            'content_size': fwd_file.upload_file.size if fwd_file.upload_file else 0,
            'last_update_datetime': fwd_file.last_update_datetime
        })
    # return the payload containing the list of files
    return payload
@endpoint(
    perm='can_access',
    description=_("Get informations about the forwarding of a user file to openADS"),
    pattern='^(?P<numero_dossier>\w+)/?$',  # noqa: W605,E501, pylint: disable=anomalous-backslash-in-string
    example_pattern='{numero_dossier}/',
    parameters={
        'numero_dossier': {'description': _("Identifier for 'dossier'"),
                           'example_value': 'DIA0130551900001'},
        'fichier_id': {'description': _("File identifier"),
                       'example_value': '78'}
    }
)  # pylint: disable=keyword-arg-before-vararg
# pylint: disable=unused-argument
def get_fwd_files_status(self, request, numero_dossier, fichier_id=None, *args, **kwargs):
    """Get informations about the forwarding of a user file to openADS.

    Group the files returned by get_fwd_files() into buckets by upload
    status and report whether every one of them was forwarded successfully.
    """
    summary = {
        'all_forwarded': True,
        'pending': [],
        'uploading': [],
        'success': [],
        'failed': []
    }
    # sort every matching file into its status bucket
    for entry in self.get_fwd_files(request, numero_dossier, fichier_id):
        summary[entry['upload_status']].append(u'[%s] %s => %s' % (
            entry['id'],
            entry['orig_filename'],
            entry['upload_msg']
        ))
        # a single non-successful file means not everything was forwarded
        if entry['upload_status'] != 'success':
            summary['all_forwarded'] = False
    return summary
@endpoint(
    perm='can_access',
    description=_("Get a 'courrier' from an openADS 'dossier'"),
    pattern='^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/(?P<lettre_type>[\w.,_ -]+)/?$',  # noqa: W605,E501, pylint: disable=anomalous-backslash-in-string
    example_pattern='{type_dossier}/{numero_dossier}/{lettre_type}',
    parameters={
        'type_dossier': {'description': _("Type of 'dossier'"), 'example_value': 'DIA'},
        'numero_dossier': {'description': _("Identifier for 'dossier'"),
                           'example_value': 'DIA0130551900001'},
        'lettre_type': {'description': _("Courrier ID to get"),
                        'example_value': 'dia_renonciation_preempter'}
    }
)  # pylint: disable=keyword-arg-before-vararg
# pylint: disable=unused-argument
def get_courrier(self, request, type_dossier, numero_dossier, lettre_type, *args, **kwargs):
    """Get a 'courrier' from an openADS 'dossier'.

    Query openADS.API for the document, check the response is valid JSON
    carrying a base-64 encoded file, and return it under the 'courrier' key.

    @raise APIError: on HTTP error, non-JSON response or invalid base-64
        content.
    """
    # build and issue the openADS.API request
    path = 'dossier/%s/%s/courrier/%s' % (
        quote(type_dossier), quote(numero_dossier), quote(lettre_type))
    url = urlparse.urljoin(self.openADS_API_url, path)
    response = self.requests.get(url)
    # HTTP failure: log it and propagate a readable error
    if not 200 <= response.status_code < 300:
        error = self.get_response_error(response)
        self.logger.warning(u"Request [GET] '%s' failed with error: '%s'", url, error)
        raise APIError(error)
    # decode the body as JSON
    try:
        result = response.json()
    except ValueError:
        raise APIError(u'No JSON content returned: %r' % response.content[:1000])
    # log the response (the helper filters out the file content)
    self.log_json_payload(result, 'response')
    # extract the 'courrier' file entry from the payload
    courrier = self.get_first_file_from_payload(result, title='response')
    # only validate the base-64 encoding; the content stays encoded
    try:
        base64.b64decode(courrier['b64_content'])
    except (TypeError, binascii.Error):
        raise APIError('Failed to decode courrier content from base 64')
    return {'courrier': courrier}
def get_response_error(self, response):  # pylint: disable=no-self-use
    """Return a error string from an HTTP response.

    When the response body is JSON with an 'errors' list, build the message
    from its entries; otherwise fall back to the (HTML-stripped) body text.
    """
    try:
        data = response.json()
        # turn each entry of the 'errors' key into a readable message
        # (multispaces are filtered by normalize())
        messages = [
            u'[%s] (%s) %s' % (err.get('location'),
                               normalize(err.get('name')),
                               normalize(err.get('description')))
            for err in (data.get('errors') or [])
        ]
        if messages:
            # a string representing the HTTP error, from the JSON details
            return u"HTTP error: %s, %s" % (response.status_code, ','.join(messages))
    except ValueError:
        # body is not JSON: fall through to the generic message
        pass
    # TODO ask for openADS.API to *always* send JSON formatted errors, not HTML ones
    # generic message built from the raw body (HTML tags and multispaces filtered)
    detail = clean_spaces(strip_tags(response.content[:1000])) if response.content else ''
    return u"HTTP error: %s%s" % (response.status_code, ', ' + detail if detail else '')
@endpoint(
    perm='can_access',
    description=_("Trigger the uploading of user's files to openADS"),
    # pylint: disable=anomalous-backslash-in-string
    pattern='^(?P<type_dossier>\w+)/(?P<numero_dossier>\w+)/?$',  # noqa: W605
    example_pattern='{type_dossier}/{numero_dossier}',
    parameters={
        'type_dossier': {'description': _("Type of 'dossier'"), 'example_value': 'DIA'},
        'numero_dossier': {'description': _("Identifier for 'dossier'"),
                           'example_value': 'DIA0130551900001'},
        'file_ids': {'description': _(("List of ForwardFile IDs to upload "
                                       "(coma separated)")),
                     'example_value': '12,18'}
    }  # pylint: disable=too-many-statements,too-many-branches,too-many-locals
)  # pylint: disable=keyword-arg-before-vararg
# @raise ForwareFile.DoesNotExist if not found
# pylint: disable=unused-argument
def upload_user_files(self, request, type_dossier, numero_dossier, file_ids=None,
                      *args, **kwargs):
    """A Job to forward user uploaded files to openADS.

    Collect the 'pending' ForwardFiles of 'numero_dossier' (or only the
    ones listed in 'file_ids'), POST them all in a single request to
    openADS.API, then record the outcome on each ForwardFile: status goes
    'pending' -> 'uploading' -> 'success' or 'failed', with a message in
    'upload_msg'.

    When 'request' is None (run as a background job instead of an HTTP
    endpoint), errors are only recorded on the ForwardFiles and logged,
    not raised as APIError.

    @raise TypeError: if 'file_ids' is neither a string nor a list.
    @raise ForwardFile.DoesNotExist: if an ID in 'file_ids' is unknown.
    @raise APIError: (only when 'request' is set) if openADS replies with
        an HTTP error or a non-JSON body.
    """
    payload = []
    fwd_files = []
    if file_ids:
        # if file_ids is a string, parse it as a coma separated ID list
        if isinstance(file_ids, six.string_types):
            file_ids = [int(fid) for fid in file_ids.split(',')]
        # invalid input
        elif not isinstance(file_ids, list):
            raise TypeError(
                "Invalid 'file_ids' argument type '%s' "
                "(must be string or list)" % type(file_ids))
    # a list of ForwardFile IDs was specified
    if file_ids:
        # pylint: disable=no-member
        fwd_files = ForwardFile.objects.filter(id__in=file_ids).all()
        # check that all ids where found
        fwd_files_ids = set([ff.id for ff in fwd_files])
        file_ids_diff = [item for item in file_ids if item not in fwd_files_ids]
        if file_ids_diff:
            raise ForwardFile.DoesNotExist(  # pylint: disable=no-member
                "The following ForwardFile IDs were not found: %s." % file_ids_diff)
        # filter out files not in status 'pending' (already uploaded or in
        # progress); only warn about the ones explicitly asked for
        fwd_files_filtered = fwd_files.filter(upload_status='pending').all()
        fwd_filtered_ids = set([ff.id for ff in fwd_files_filtered])
        file_ids_diff = [item for item in file_ids if item not in fwd_filtered_ids]
        if file_ids_diff:
            self.logger.warning(
                "The following ForwardFile IDs were not in status '%s' "
                "when asked specificaly to upload them: %s." % ('pending', file_ids_diff))
        fwd_files = fwd_files_filtered
    # no files_ids where specified
    else:
        # process all ForwardFiles of the 'dossier' (in status 'pending')
        fwd_files = ForwardFile.objects.filter(  # pylint: disable=no-member
            numero_dossier=numero_dossier,
            upload_status='pending'
        ).all()
    # build the upload payload, flagging each file as 'uploading' on the way
    for fwd_file in fwd_files:
        self.logger.debug(u"upload_user_files() ForwardFile file_id: %s", fwd_file.id)
        # add the file content and data to the payload
        # NOTE(review): a '.pdf' suffix is appended unless the name already
        # ends with it (case-sensitive check)
        payload.append({
            'filename': '%s%s' % (
                fwd_file.orig_filename,
                '.pdf' if fwd_file.orig_filename[-4:] != '.pdf' else ''),
            'content_type': fwd_file.content_type,
            'b64_content': base64.b64encode(fwd_file.upload_file.read()),
            'file_type': fwd_file.type_fichier
        })
        self.logger.debug("upload_user_files() payload added")
        # update the file upload data (status and attempts)
        fwd_file.upload_status = 'uploading'
        fwd_file.upload_attempt += 1
        fwd_file.upload_msg = 'attempt %s' % fwd_file.upload_attempt
        self.logger.debug(u"upload_user_files() upload_msg: '%s'", fwd_file.upload_msg)
        fwd_file.save()
        self.logger.debug("upload_user_files() ForwardFile saved")
    # if files need to be forwarded
    if payload:
        self.logger.debug("upload_user_files() payload is not empty")
        # log the payload
        self.log_json_payload(payload, 'payload')
        # make the request to openADS.API (with a specific timeout)
        url = urlparse.urljoin(self.openADS_API_url, 'dossier/%s/%s/files' % (
            quote(type_dossier), quote(numero_dossier)))
        response = self.requests.post(
            url,
            json=payload,
            timeout=self.openADS_API_timeout
        )
        # response is an error
        if response.status_code // 100 != 2:
            error = self.get_response_error(response)
            self.logger.warning(u"Request [POST] '%s' failed with error: '%s'", url, error)
            # update every files status as 'failed' and save the error message
            # NOTE(review): the error message is recomputed per file; the
            # 'error' variable above could be reused
            for fwd_file in fwd_files:
                fwd_file.upload_status = 'failed'
                fwd_file.upload_msg = self.get_response_error(response)
                fwd_file.save()
            # log (warning) the error message
            self.logger.warning((u"upload_user_files() openADS response is not OK "
                                 "(code: %s) for dossier '%s' and files '%s'"),
                                response.status_code,
                                numero_dossier,
                                file_ids)
            # respond with APIError (endpoint mode only)
            if request:
                raise APIError(error)
        # response is not an error
        else:
            # load the reponse as JSON
            try:
                response.json()
            # in case of failure
            except ValueError:
                # update every files status as 'failed' and save the error message
                for fwd_file in fwd_files:
                    fwd_file.upload_status = 'failed'
                    fwd_file.upload_msg = u'No JSON content returned: %r' % (
                        response.content[:1000])
                    fwd_file.save()
                # log (warning) the error message
                self.logger.warning((u"upload_user_files() openADS response is not JSON valid "
                                     "for dossier '%s' and files '%s'"),
                                    numero_dossier,
                                    fwd_files)
                # respond with APIError (endpoint mode only)
                if request:
                    raise APIError(u'No JSON content returned: %r' % response.content[:1000])
            # response correctly loaded as JSON
            else:
                # TODO handle response (now its just an informational sentence in key 'data')
                # update every files status as 'success' and save the success message
                for fwd_file in fwd_files:
                    fwd_file.upload_status = 'success'
                    fwd_file.upload_msg = 'uploaded successfuly'
                    # save the file (content will be deleted automatically)
                    # NOTE(review): the path is captured before save() because
                    # saving appears to drop the stored content — confirm in
                    # the ForwardFile model
                    fpath = fwd_file.upload_file.path
                    fwd_file.save()
                    # log the success message
                    self.logger.debug(
                        u"upload_user_files() flaging file '%s' as 'transfered' (deleted '%s')",
                        fwd_file.id,
                        fpath
                    )
                # respond with success
                if request:
                    return {'message': 'all files transfered successfully'}
    # no file need to be forwarded
    else:
        self.logger.warning(
            u"upload_user_files() payload is empty for dossier '%s' and files '%s'",
            numero_dossier,
            file_ids
        )
        # respond with message
        if request:
            return {'message': 'no file to transfer'}
    # return something to please pylint
    return True
# pylint: disable=keyword-arg-before-vararg
@endpoint(
    perm='can_access',
    description=_("Get the type of 'courrier' of an openADS 'dossier'"),
    pattern='^(?P<type_dossier>\w+)/?$',  # noqa: W605,E501, pylint: disable=anomalous-backslash-in-string
    example_pattern='{type_dossier}/',
    parameters={
        'type_dossier': {'description': _("Type of 'dossier'"), 'example_value': 'DIA'},
        'type_dossier_detaille': {'description': _("Detailled type of 'dossier'"),
                                  'example_value': 'CUb'},
        'specific': {'description': _("Get a specific version of the 'courrier type'"),
                     'example_value': 'refus'},
    }  # pylint: disable=too-many-branches
)  # pylint: disable=no-self-use
# pylint: disable=unused-argument
def get_courrier_type(self, request, type_dossier, type_dossier_detaille=None, specific=None,
                      *args, **kwargs):
    """Get the type of 'courrier' for an openADS 'dossier'.

    Map ('type_dossier', 'type_dossier_detaille', 'specific') to the name
    of the matching openADS 'courrier' template; None when the 'dossier'
    type is unknown.
    """
    courrier_type = None
    if type_dossier == 'DIA':
        # default template, overridden by a known specific variant
        courrier_type = {
            'delegation': 'dia_delegation',
            'irrecevabilite': 'dia_irrecevabilite',
            'preemption': 'dia_souhait_preempter',
        }.get(specific, 'dia_renonciation_preempter')
    elif type_dossier == 'CU':
        if type_dossier_detaille == 'CUb':
            # 'CUb' has its own accord/refus templates
            courrier_type = 'CUb - REFUS' if specific == 'refus' else 'CUb - ACCORD'
        else:
            courrier_type = 'CUa'
    elif type_dossier == 'DP':
        courrier_type = {
            'reserves': 'decision_non_opposition_DP',
            'annulation': 'arrete_annulation_DP',
            'prolongation': 'arrete_prorogation_DP',
            'prolongation_refus': 'arrete_refus_prorogation_DP',
            'transfert_refus': 'arrete_refus_transfert_DP',
            'transfert': 'arrete_transfert_DP',
            'attestation_tacite': 'attestation_non opposition_tacite_DP',
            'opposition': 'decision_opposition_DP',
            'certificat_tacite': 'certificat_opposition_tacite_DP',
        }.get(specific, 'decision_nom_opposition_sr')
    elif type_dossier in ('PC', 'PI'):
        courrier_type = 'arrete_refus' if specific == 'refus' else 'arrete_ss_reserves'
    # TODO add others type and specifics variants
    return {'courrier_type': courrier_type}