# -*- coding: utf-8 -*-
|
|
# passerelle - uniform access to multiple data sources and services
|
|
# Copyright (C) 2019 Entr'ouvert
|
|
#
|
|
# This program is free software: you can redistribute it and/or modify it
|
|
# under the terms of the GNU Affero General Public License as published
|
|
# by the Free Software Foundation, either version 3 of the License, or
|
|
# (at your option) any later version.
|
|
#
|
|
# This program is distributed in the hope that it will be useful,
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
# GNU Affero General Public License for more details.
|
|
#
|
|
# You should have received a copy of the GNU Affero General Public License
|
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
import base64
|
|
import collections
|
|
import datetime
|
|
import os
|
|
import re
|
|
import stat
|
|
import zipfile
|
|
|
|
from django.contrib.postgres.fields import JSONField
|
|
from django.core.files import File
|
|
from django.db import models, transaction
|
|
from django.template import engines
|
|
from django.urls import reverse
|
|
from django.utils import six
|
|
from django.utils.translation import ugettext
|
|
from django.utils.translation import ugettext_lazy as _
|
|
from lxml import etree as ET
|
|
|
|
from passerelle.base.models import BaseResource
|
|
from passerelle.utils.api import endpoint
|
|
from passerelle.utils.conversion import normalize
|
|
from passerelle.utils.sftp import SFTPField
|
|
from passerelle.utils.wcs import FormDefField, get_wcs_choices
|
|
from passerelle.utils.xml import text_content
|
|
|
|
from .xsd import Schema
|
|
|
|
# Process at most this many files needing work per run_loop() iteration.
MAX_REQUESTS_PER_ITERATION = 200

# Identifiers of the supported "Mes Démarches En Ligne" (MDEL) procedures.
PROCEDURE_DOC = 'DOC'
PROCEDURE_RCO = 'recensementCitoyen'
PROCEDURE_DDPACS = 'depotDossierPACS'
PROCEDURES = [
    (PROCEDURE_DOC, _('Request for construction site opening')),
    (PROCEDURE_RCO, _('Request for mandatory citizen census')),
    (PROCEDURE_DDPACS, _('Pre-request for citizen solidarity pact')),
]

# Expected name of incoming archives: <identifier>-<procedure>-<sequence>.zip
FILE_PATTERN = re.compile(r'^(?P<identifier>.*)-(?P<procedure>[a-zA-Z0-9]+)-(?P<sequence>\d+).zip$')
# Name of the "enveloppe" XML file contained in each archive.
ENT_PATTERN = re.compile(r'^.*-ent-\d+(?:-.*)?.xml$')
NSMAP = {'dgme-metier': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier'}
# Extracts the routing value (used as INSEE code) from the enveloppe document.
ROUTAGE_XPATH = ET.XPath(
    ('dgme-metier:Routage/dgme-metier:Donnee/dgme-metier:Valeur/text()'), namespaces=NSMAP
)

EMAIL_XPATH = ET.XPath(('dgme-metier:Teledemarche/dgme-metier:Email/text()'), namespaces=NSMAP)

# XPaths used to enumerate attached documents and their metadata.
DOCUMENTS_XPATH = ET.XPath('dgme-metier:Document', namespaces=NSMAP)
PIECE_JOINTE_XPATH = ET.XPath('dgme-metier:PieceJointe', namespaces=NSMAP)
CODE_XPATH = ET.XPath('dgme-metier:Code', namespaces=NSMAP)
FICHIER_XPATH = ET.XPath('dgme-metier:Fichier', namespaces=NSMAP)
FICHIER_DONNEES_XPATH = ET.XPath('.//dgme-metier:FichierDonnees', namespaces=NSMAP)

ET.register_namespace(
    'dgme-metier', 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier'
)
|
|
|
|
|
|
def simplify(s):
    '''Simplify XML node tag names because XSD from DGME are garbage.

    Returns a lowercased identifier: the string is decoded if needed,
    accents are stripped (normalize), characters other than word chars,
    whitespace, apostrophe, hyphen and underscore are removed, hyphens
    become underscores, and whitespace and apostrophes are dropped.
    '''
    if not s:
        return ''
    if not isinstance(s, six.text_type):
        s = six.text_type(s, 'utf-8', 'ignore')
    s = normalize(s)
    # The hyphen must be escaped: unescaped, '-_ was parsed as the
    # character range U+0027..U+005F, unintentionally keeping lots of
    # punctuation such as parentheses and '@'.
    s = re.sub(r'[^\w\s\'\-_]', '', s)
    s = s.replace('-', '_')
    s = re.sub(r'[\s\']+', '', s)
    return s.strip().lower()
|
|
|
|
|
|
class Resource(BaseResource):
    """Connector pulling MDEL request archives from service-public.fr over SFTP."""

    category = _('Business Process Connectors')

    # SFTP account where service-public.fr deposits request archives
    input_sftp = SFTPField(verbose_name=_('Input SFTP URL'), null=True)

    # SFTP account where acknowledgment archives are uploaded
    output_sftp = SFTPField(verbose_name=_('Output SFTP URL'), null=True)
|
|
|
|
def check_status(self):
|
|
with self.input_sftp.client() as sftp:
|
|
sftp.listdir()
|
|
with self.output_sftp.client() as sftp:
|
|
sftp.listdir()
|
|
get_wcs_choices(session=self.requests)
|
|
|
|
    @endpoint(name='ping', show=False, description=_('Check SFTP availability'))
    def ping(self, request):
        # deprecated endpoint, kept for backward compatibility;
        # check_status() performs the same verifications
        self.check_status()
        return {'err': 0}
|
|
|
|
    def hourly(self):
        # periodic cron entry point: poll the input SFTP for new files
        self.run_loop()
|
|
|
|
    def run_loop(self, count=0):
        """Poll the input SFTP and handle incoming archives.

        count: maximum number of files requiring actual work in this run;
        0 means MAX_REQUESTS_PER_ITERATION.
        """
        if count == 0:
            count = MAX_REQUESTS_PER_ITERATION
        with transaction.atomic():
            # lock resource
            r = Resource.objects.select_for_update(skip_locked=True).filter(pk=self.pk)
            if not r:
                # already locked
                self.logger.info('did nothing')
                return
            with self.input_sftp.client() as sftp:
                # make sure the DONE/ and FAILED/ target directories exist
                try:
                    sftp.lstat('DONE')
                except IOError:
                    sftp.mkdir('DONE')

                try:
                    sftp.lstat('FAILED')
                except IOError:
                    sftp.mkdir('FAILED')

                def helper():
                    # yield names of plain files in the incoming directory
                    for file_stat in sftp.listdir_attr():
                        if stat.S_ISDIR(file_stat.st_mode):
                            continue
                        yield file_stat.filename

                for filename in helper():
                    m = FILE_PATTERN.match(filename)
                    if not m:
                        # unknown naming scheme: park the file in FAILED/
                        self.logger.info(
                            'file "%s" did not match pattern %s, moving to FAILED/', filename, FILE_PATTERN
                        )
                        sftp.rename(filename, 'FAILED/' + filename)
                        continue
                    procedure = m.group('procedure')
                    try:
                        mapping = self.mappings.get(procedure=procedure)
                    except Mapping.DoesNotExist:
                        # no mapping configured: leave the file in place, it
                        # will be retried once a mapping is created
                        self.logger.info(
                            'no mapping for procedure "%s" for file "%s", moving to FAILED/',
                            procedure,
                            filename,
                        )
                        continue

                    handler = self.FileHandler(
                        resource=self,
                        sftp=sftp,
                        filename=filename,
                        identifier=m.group('identifier'),
                        procedure=procedure,
                        sequence=m.group('sequence'),
                        mapping=mapping,
                    )
                    # a file never seen before counts against the quota
                    if not handler.request:
                        count -= 1
                    try:
                        move, error = handler()
                    except Exception:
                        # unexpected failure is fatal for this file
                        count -= 1
                        self.logger.exception('handling of file "%s" failed', filename)
                        sftp.rename(filename, 'FAILED/' + filename)
                    else:
                        if move and error:
                            # fatal error: park the archive in FAILED/
                            count -= 1
                            self.logger.error('handling of file "%s" failed: %s', filename, error)
                            sftp.rename(filename, 'FAILED/' + filename)
                        else:
                            if error:
                                # transient error: leave the file in place for a retry
                                count -= 1
                                self.logger.warning('handling of file "%s" failed: %s', filename, error)
                            elif move:
                                # fully processed: archive the file in DONE/
                                count -= 1
                                sftp.rename(filename, 'DONE/' + filename)
                    if not count:
                        break
|
|
|
|
    class FileHandler(object):
        """Handle one incoming zip archive from service-public.fr."""

        def __init__(self, resource, sftp, filename, identifier, procedure, sequence, mapping):
            self.resource = resource
            self.sftp = sftp
            self.filename = filename
            self.identifier = identifier
            self.procedure = procedure
            self.sequence = sequence
            self.mapping = mapping
            # declared variable names for this procedure, used by get_data()
            self.variables = list(self.mapping.variables)
            # existing Request row if this file was already (partially) handled
            self.request = Request.objects.filter(resource=resource, filename=filename).first()
|
|
|
|
        def __call__(self):
            """Advance the request through its processing states.

            Returns a (move, error) tuple: `move` tells run_loop() whether the
            file can be moved out of the incoming directory, `error` carries a
            message when something failed (fatal when `move` is also true).
            """
            if not self.request:
                # first sight of the file: copy it locally before any parsing
                with self.sftp.open(self.filename) as fd:
                    with transaction.atomic():
                        self.request = Request.objects.create(resource=self.resource, filename=self.filename)
                        self.request.state = Request.STATE_RECEIVED
                        self.request.archive.save(self.filename, File(fd))
            if self.request.state == Request.STATE_RECEIVED:
                with self.request.archive as fd:
                    # error during processing are fatal, we want to log them
                    data, error = self.process(fd)
                if not data:
                    return False, error
                try:
                    backoffice_url = self.transfer(data)
                except Exception as e:
                    return False, 'error during transfer to w.c.s %r' % e
                self.request.url = backoffice_url
                self.request.state = Request.STATE_TRANSFERED
                self.request.save()

            if self.request.state == Request.STATE_TRANSFERED:
                try:
                    self.response()
                except Exception as e:
                    return False, 'error during response to service-public.fr %r' % e
                self.request.state = Request.STATE_RETURNED
                self.request.save()
                self.resource.logger.info('%s responded, closed', self.request.filename)
            return True, None
|
|
|
|
        def process(self, fd):
            """Extract form data and attachments from the zip archive.

            Returns (data, None) on success or (False, error_message) when the
            archive content does not match expectations.
            """
            try:
                with zipfile.ZipFile(fd) as archive:
                    # sort files
                    doc_files = []
                    ent_files = []
                    attachments = {}
                    for name in archive.namelist():
                        if ENT_PATTERN.match(name):
                            ent_files.append(name)

                    # exactly one "enveloppe" (ent) XML file is expected
                    if len(ent_files) != 1:
                        return False, 'too many/few ent files found: %s' % ent_files

                    ent_file = ent_files[0]

                    with archive.open(ent_file) as fd:
                        document = ET.parse(fd)

                    # collect attachments declared in the enveloppe, keyed by
                    # a normalized code: "pj_*" for PieceJointe nodes, "doc_*"
                    # for Document nodes
                    for pj_node in PIECE_JOINTE_XPATH(document):
                        code = CODE_XPATH(pj_node)[0].text
                        code = 'pj_' + code.lower().replace('-', '_')
                        fichier = FICHIER_XPATH(pj_node)[0].text
                        attachments.setdefault(code, []).append(fichier)
                    for doc_node in DOCUMENTS_XPATH(document):
                        code = CODE_XPATH(doc_node)[0].text
                        code = 'doc_' + code.lower().replace('-', '_')
                        fichier = FICHIER_DONNEES_XPATH(doc_node)[0].text
                        attachments.setdefault(code, []).append(fichier)

                    # the single XML attachment is the business document
                    doc_files = [
                        value for l in attachments.values() for value in l if value.lower().endswith('.xml')
                    ]
                    if len(doc_files) != 1:
                        return False, 'too many/few doc files found: %s' % doc_files

                    # replace each single-element attachment list by the file
                    # content, base64-encoded for transmission to w.c.s.
                    for key in attachments:
                        if len(attachments[key]) > 1:
                            return False, 'too many attachments of kind %s: %r' % (key, attachments[key])
                        name = attachments[key][0]
                        with archive.open(attachments[key][0]) as zip_fd:
                            content = zip_fd.read()
                        attachments[key] = {
                            'filename': name,
                            'content': base64.b64encode(content).decode('ascii'),
                            'content_type': 'application/octet-stream',
                        }

                    if self.procedure == PROCEDURE_RCO and not attachments:
                        return False, 'no attachments but RCO requires them'

                    doc_file = doc_files[0]

                    # routing: exactly one INSEE code must be present
                    insee_codes = ROUTAGE_XPATH(document)
                    if len(insee_codes) != 1:
                        return False, 'too many/few insee codes found: %s' % insee_codes
                    insee_code = insee_codes[0]

                    email = EMAIL_XPATH(document)
                    email = email[0] if email else ''

                    data = {
                        'insee_code': insee_code,
                        'email': email,
                    }
                    data.update(attachments)

                    # parse the business document and flatten it into data
                    with archive.open(doc_file) as fd:
                        document = ET.parse(fd)
                    data.update(self.extract_data(document))
                    # apply per-procedure post-processing when defined
                    if hasattr(self, 'update_data_%s' % self.procedure):
                        getattr(self, 'update_data_%s' % self.procedure)(data)
            except zipfile.BadZipfile:
                return False, 'could not load zipfile'
            return data, None
|
|
|
|
def transfer(self, data):
|
|
formdef = self.mapping.formdef
|
|
formdef.session = self.resource.requests
|
|
|
|
with formdef.submit() as submitter:
|
|
submitter.submission_channel = 'web'
|
|
submitter.submission_context = {
|
|
'mdel_procedure': self.procedure,
|
|
'mdel_identifier': self.identifier,
|
|
'mdel_sequence': self.sequence,
|
|
}
|
|
fields = self.mapping.rules.get('fields', {})
|
|
for name in fields:
|
|
field = fields[name]
|
|
variable = field['variable']
|
|
expression = field['expression']
|
|
value = data.get(variable)
|
|
if expression.strip():
|
|
template = engines['django'].from_string(expression)
|
|
context = data.copy()
|
|
context['value'] = value
|
|
value = template.render(context)
|
|
if not value:
|
|
continue
|
|
submitter.set(name, value)
|
|
return submitter.result.backoffice_url
|
|
|
|
def response(self):
|
|
with self.resource.output_sftp.client() as client:
|
|
with client.open(self.request.response_zip_filename, mode='w') as fd:
|
|
self.request.build_response_zip(
|
|
fd, etat='100', commentaire='Demande transmise à la collectivité'
|
|
)
|
|
with self.resource.input_sftp.client() as client:
|
|
with client.open('DONE/' + self.request.response_zip_filename, mode='w') as fd:
|
|
self.request.build_response_zip(
|
|
fd, etat='100', commentaire='Demande transmise à la collectivité'
|
|
)
|
|
|
|
def get_data(self, data, name):
|
|
# prevent error in manual mapping
|
|
assert name in self.variables, 'variable "%s" is unknown' % name
|
|
return data.get(name, '')
|
|
|
|
        def update_data_DOC(self, data):
            """Derive w.c.s. fields for the DOC (construction site opening) procedure."""

            def get(name):
                return self.get_data(data, name)

            # a DOC request targets either a building permit or a development
            # permit, identified by which permit number is filled
            numero_permis_construire = get('doc_declarant_designation_permis_numero_permis_construire')
            numero_permis_amenager = get('doc_declarant_designation_permis_numero_permis_amenager')
            data['type_permis'] = (
                'Un permis de construire' if numero_permis_construire else 'Un permis d\'aménager'
            )
            data['numero_permis'] = numero_permis_construire or numero_permis_amenager
            # type_personne holds the string 'true' for natural persons
            particulier = get('doc_declarant_identite_type_personne').strip().lower() == 'true'
            data['type_declarant'] = 'Un particulier' if particulier else 'Une personne morale'
            if particulier:
                data['nom'] = get('doc_declarant_identite_personne_physique_nom')
                data['prenoms'] = get('doc_declarant_identite_personne_physique_prenom')
            else:
                data['nom'] = get('doc_declarant_identite_personne_morale_representant_personne_morale_nom')
                data['prenoms'] = get(
                    'doc_declarant_identite_personne_morale_representant_personne_morale_prenom'
                )
            # MDEL civility codes to human-readable labels
            mapping = {
                '1000': 'Monsieur',
                '1001': 'Madame',
                '1002': 'Madame et Monsieur',
            }
            if particulier:
                data['civilite_particulier'] = mapping.get(
                    get('doc_declarant_identite_personne_physique_civilite'), ''
                )
            else:
                data['civilite_pm'] = mapping.get(
                    get('doc_declarant_identite_personne_morale_representant_personne_morale_civilite'), ''
                )
            data['portee'] = (
                'Pour la totalité des travaux'
                if get('doc_ouverture_chantier_totalite_travaux').lower().strip() == 'true'
                else 'Pour une tranche des travaux'
            )
|
|
|
|
        def update_data_recensementCitoyen(self, data):
            """Derive w.c.s. fields for the citizen census (RCO) procedure."""

            def get(name):
                return self.get_data(data, name)

            # the motif is carried by one of two possible repeated nodes
            motif = get('recensementcitoyen_formalite_formalitemotifcode_1') or get(
                'recensementcitoyen_formalite_formalitemotifcode_2'
            )
            data['motif'] = {'RECENSEMENT': '1', 'EXEMPTION': '2'}[motif]
            if data['motif'] == '2':
                data['motif_exempte'] = (
                    "Titulaire d'une carte d'invalidité de 80% minimum"
                    if get('recensementcitoyen_formalite_formalitemotifcode_2') == 'INFIRME'
                    else "Autre situation"
                )
                data['justificatif_exemption'] = get('pj_je')
            data['double_nationalite'] = 'Oui' if get('recensementcitoyen_personne_nationalite') else 'Non'
            data['residence_differente'] = (
                'Oui' if get('recensementcitoyen_personne_adresseresidence_localite') else 'Non'
            )
            data['civilite'] = 'Monsieur' if get('recensementcitoyen_personne_civilite') == 'M' else 'Madame'

            def get_lieu_naissance(variable, code):
                # birth place nodes may be repeated (plain, _1, _2 suffixes);
                # return the name of the first one matching the wanted code
                for idx in ['', '_1', '_2']:
                    v = variable + idx
                    if get(v + '_code') == code:
                        return get(v + '_nom')

            data['cp_naissance'] = get_lieu_naissance('recensementcitoyen_personne_lieunaissance', 'AUTRE')
            data['commune_naissance'] = get_lieu_naissance(
                'recensementcitoyen_personne_lieunaissance', 'COMMUNE'
            )
            data['justificatif_identite'] = get('pj_ji')
            situation_matrimoniale = get('recensementcitoyen_personne_situationfamille_situationmatrimoniale')
            data['situation_familiale'] = {
                'Célibataire': 'Célibataire',
                'Marié': 'Marié(e)',
            }.get(situation_matrimoniale, 'Autres')
            if data['situation_familiale'] == 'Autres':
                data['situation_familiale_precision'] = situation_matrimoniale
            pupille = get('recensementcitoyen_personne_situationfamille_pupille')
            data['pupille'] = 'Oui' if pupille else 'Non'
            data['pupille_categorie'] = {
                'NATION': "Pupille de la nation",
                'ETAT': "Pupille de l'État",
            }.get(pupille)
            # contact methods: pick email and phone out of up to three entries
            for idx in ['', '_1', '_2']:
                code = get('recensementcitoyen_personne_methodecontact%s_canalcode' % idx)
                uri = get('recensementcitoyen_personne_methodecontact%s_uri' % idx)
                if code == 'EMAIL':
                    data['courriel'] = uri
                if code == 'TEL':
                    data['telephone_fixe'] = uri
            data['justificatif_famille'] = get('pj_jf')
            # a parent's filiation is unknown when no family name is given
            data['filiation_inconnue_p1'] = not get('recensementcitoyen_filiationpere_nomfamille')
            data['filiation_inconnue_p2'] = not get('recensementcitoyen_filiationmere_nomfamille')
            data['cp_naissance_p1'] = get_lieu_naissance(
                'recensementcitoyen_filiationpere_lieunaissance', 'AUTRE'
            )
            data['cp_naissance_p2'] = get_lieu_naissance(
                'recensementcitoyen_filiationmere_lieunaissance', 'AUTRE'
            )
            data['commune_naissance_p1'] = get_lieu_naissance(
                'recensementcitoyen_filiationpere_lieunaissance', 'COMMUNE'
            )
            data['commune_naissance_p2'] = get_lieu_naissance(
                'recensementcitoyen_filiationmere_lieunaissance', 'COMMUNE'
            )
            # convert DD/MM/YYYY birth dates to ISO format for w.c.s.
            for key in data:
                if key.endswith('_datenaissance') and data[key]:
                    data[key] = datetime.datetime.strptime(data[key], '%d/%m/%Y').date().strftime('%Y-%m-%d')
|
|
|
|
def update_data_depotDossierPACS(self, data):
|
|
def get(name):
|
|
return self.get_data(data, name)
|
|
|
|
civilite_p1 = get('pacs_partenaire1_civilite')
|
|
data['civilite_p1'] = 'Monsieur' if civilite_p1 == 'M' else 'Madame'
|
|
data['acte_naissance_p1'] = get('pj_an')
|
|
data['identite_verifiee_p1'] = (
|
|
'Oui' if get('pacs_partenaire1_titreidentiteverifie') == 'true' else 'Non'
|
|
)
|
|
|
|
civilite_p2 = get('pacs_partenaire2_civilite')
|
|
data['civilite_p2'] = 'Monsieur' if civilite_p2 == 'M' else 'Madame'
|
|
data['acte_naissance_p2'] = get('pj_anp')
|
|
data['identite_verifiee_p2'] = (
|
|
'Oui' if get('pacs_partenaire2_titreidentiteverifie') == 'true' else 'Non'
|
|
)
|
|
|
|
data['type_convention'] = '2' if get('pacs_convention_conventionspecifique') == 'true' else '1'
|
|
data['aide_materielle'] = (
|
|
'1'
|
|
if get('pacs_convention_conventiontype_aidemateriel_typeaidemateriel') == 'aideProportionnel'
|
|
else '2'
|
|
)
|
|
data['regime'] = '1' if get('pacs_convention_conventiontype_regimepacs') == 'legal' else '2'
|
|
data['convention_specifique'] = get('pj_cp')
|
|
|
|
        def extract_data(self, document):
            '''Convert XML into a dictionnary of values'''
            root = document.getroot()

            def tag_name(node):
                # local tag name, simplified into a safe identifier
                return simplify(ET.QName(node.tag).localname)

            def helper(path, node):
                # depth-first walk yielding (path, text) pairs for leaf nodes
                if len(node):
                    # count sibling tags so repeated tags get a _N suffix
                    tags = collections.Counter(tag_name(child) for child in node)
                    counter = collections.Counter()
                    for child in node:
                        name = tag_name(child)
                        if tags[name] > 1:
                            counter[name] += 1
                            name += '_%s' % counter[name]
                        for p, value in helper(path + [name], child):
                            yield p, value
                else:
                    yield path, text_content(node)
                    # case of multiple nodes: also expose the leaf under a _1
                    # suffix so rules can address single- and multi-valued
                    # nodes uniformly
                    new_path = path[:-1] + [path[-1] + '_1']
                    yield new_path, text_content(node)

            return {'_'.join(path): value for path, value in helper([tag_name(root)], root)}
|
|
|
|
def export_json(self):
|
|
d = super().export_json()
|
|
d['mappings'] = [mapping.export_json() for mapping in self.mappings.all()]
|
|
return d
|
|
|
|
@classmethod
|
|
def import_json_real(self, overwrite, instance, d, **kwargs):
|
|
mappings_json = d.pop('mappings', [])
|
|
instance = super().import_json_real(overwrite, instance, d, **kwargs)
|
|
if instance and overwrite:
|
|
instance.mappings.all().delete()
|
|
for mapping_json in mappings_json:
|
|
Mapping.import_json(mapping_json, instance)
|
|
return instance
|
|
|
|
    class Meta:
        # label displayed in the connectors management UI
        verbose_name = _('Service-Public.fr')
|
|
|
|
|
|
def default_rule():
    """Return a new empty rules mapping.

    Used as the JSONField default; it must be a callable so that each
    Mapping instance gets its own dict.
    """
    return {}
|
|
|
|
|
|
@six.python_2_unicode_compatible
class Mapping(models.Model):
    """Mapping between an MDEL procedure and a w.c.s. form definition."""

    # owning connector instance
    resource = models.ForeignKey(
        Resource, verbose_name=_('Resource'), related_name='mappings', on_delete=models.CASCADE
    )

    # MDEL procedure identifier, see PROCEDURES
    # NOTE(review): unique=True makes the procedure unique across ALL
    # resources, not per resource — confirm this is intended
    procedure = models.CharField(verbose_name=_('Procedure'), choices=PROCEDURES, unique=True, max_length=32)

    # target w.c.s. form definition
    formdef = FormDefField(verbose_name=_('Formdef'))

    # expected shape: {'fields': {field_id: {'variable': ..., 'expression': ...}}}
    rules = JSONField(verbose_name=_('Rules'), default=default_rule)
|
|
|
|
def get_absolute_url(self):
|
|
return reverse('sp-fr-mapping-edit', kwargs=dict(slug=self.resource.slug, pk=self.pk))
|
|
|
|
    @property
    def xsd(self):
        # parse the XSD file bundled next to this module and matching the
        # procedure name (e.g. "DOC.XSD"), and build a Schema from it;
        # the file is re-read on each access
        path = os.path.join(os.path.dirname(__file__), '%s.XSD' % self.procedure)
        with open(path, 'rb') as fd:
            doc = ET.parse(fd)
        schema = Schema()
        schema.visit(doc.getroot())
        return schema
|
|
|
|
    @property
    def variables(self):
        """Yield all variable names usable in this mapping's rules."""
        # variables extracted from the enveloppe
        yield 'insee_code'
        yield 'email'
        # variables derived from the procedure's XSD structure
        for path, dummy in self.xsd.paths():
            names = [simplify(tag.localname) for tag in path]
            yield '_'.join(names)
        # extra computed variables declared per procedure, see below
        if hasattr(self, 'variables_%s' % self.procedure):
            for variable in getattr(self, 'variables_%s' % self.procedure):
                yield variable
|
|
|
|
@property
|
|
def variables_DOC(self):
|
|
yield 'type_permis'
|
|
yield 'numero_permis'
|
|
yield 'type_declarant'
|
|
yield 'nom'
|
|
yield 'prenoms'
|
|
yield 'civilite_particulier'
|
|
yield 'civilite_pm'
|
|
yield 'portee'
|
|
|
|
@property
|
|
def variables_recensementCitoyen(self):
|
|
yield 'motif'
|
|
yield 'motif_exempte'
|
|
yield 'justificatif_exemption'
|
|
yield 'double_nationalite'
|
|
yield 'residence_differente'
|
|
yield 'civilite'
|
|
yield 'cp_naissance'
|
|
yield 'commune_naissance'
|
|
yield 'pj_je'
|
|
yield 'pj_ji'
|
|
yield 'situation_familiale'
|
|
yield 'situation_familiale_precision'
|
|
yield 'pupille'
|
|
yield 'pupille_categorie'
|
|
yield 'courriel'
|
|
yield 'telephone_fixe'
|
|
yield 'pj_jf'
|
|
yield 'filiation_inconnue_p1'
|
|
yield 'filiation_inconnue_p2'
|
|
yield 'cp_naissance_p1'
|
|
yield 'cp_naissance_p2'
|
|
yield 'commune_naissance_p1'
|
|
yield 'commune_naissance_p2'
|
|
|
|
@property
|
|
def variables_depotDossierPACS(self):
|
|
yield 'pj_an'
|
|
yield 'pj_anp'
|
|
yield 'pj_cp'
|
|
yield 'doc_15725_01'
|
|
yield 'doc_flux_pacs'
|
|
yield 'doc_recappdf'
|
|
yield 'civilite_p1'
|
|
yield 'acte_naissance_p1'
|
|
yield 'identite_verifiee_p1'
|
|
|
|
yield 'civilite_p2'
|
|
yield 'acte_naissance_p2'
|
|
yield 'identite_verifiee_p2'
|
|
|
|
yield 'type_convention'
|
|
yield 'aide_materielle'
|
|
yield 'regime'
|
|
yield 'convention_specifique'
|
|
|
|
    def __str__(self):
        # e.g. 'Mapping from "Request for ..." to formdef "Some form"'
        return ugettext('Mapping from "{procedure}" to formdef "{formdef}"').format(
            procedure=self.get_procedure_display(), formdef=self.formdef.title if self.formdef else '-'
        )
|
|
|
|
def export_json(self):
|
|
return {
|
|
'procedure': self.procedure,
|
|
'formdef': str(self.formdef),
|
|
'rules': self.rules,
|
|
}
|
|
|
|
    @classmethod
    def import_json(cls, d, resource):
        """Create or update the mapping for d['procedure'] on `resource`."""
        # reuse an existing mapping row when there is one, else create it
        mapping = cls.objects.filter(resource=resource, procedure=d['procedure']).first() or cls(
            resource=resource, procedure=d['procedure']
        )
        mapping.formdef = d['formdef']
        mapping.rules = d['rules']
        mapping.save()
        return mapping
|
|
|
|
    class Meta:
        verbose_name = _('MDEL mapping')
        verbose_name_plural = _('MDEL mappings')
|
|
|
|
|
|
class Request(models.Model):
    """One incoming archive and its processing state."""

    # To prevent mixing errors from analysing archive from s-p.fr and errors
    # from pushing to w.c.s we separate processing with three steps:
    # - receiving, i.e. copying zipfile from SFTP and storing them locally
    # - processing, i.e. opening the zipfile and extracting content as we need it
    # - transferring, pushing content as a new form in w.c.s.
    STATE_RECEIVED = 'received'
    STATE_TRANSFERED = 'transfered'
    STATE_RETURNED = 'returned'
    STATE_ERROR = 'error'
    STATES = [
        (STATE_RECEIVED, _('Received')),
        (STATE_TRANSFERED, _('Transferred')),
        (STATE_ERROR, _('Error')),
        (STATE_RETURNED, _('Returned')),
    ]

    resource = models.ForeignKey(Resource, verbose_name=_('Resource'), on_delete=models.CASCADE)

    created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True)

    # fixed copy/paste error: this is the modification timestamp, its
    # verbose_name previously read 'Created'
    modified = models.DateTimeField(verbose_name=_('Modified'), auto_now=True)

    # name of the incoming file, see FILE_PATTERN
    filename = models.CharField(verbose_name=_('Identifier'), max_length=128)

    # local copy of the incoming zip archive
    archive = models.FileField(verbose_name=_('Archive'), max_length=256)

    state = models.CharField(verbose_name=_('State'), choices=STATES, default=STATE_RECEIVED, max_length=16)

    # backoffice URL of the form created in w.c.s.
    url = models.URLField(verbose_name=_('URL'), blank=True)
|
|
|
|
    def delete(self, *args, **kwargs):
        # best-effort removal of the stored archive file: a failure to delete
        # the file must not prevent deletion of the database row
        try:
            self.archive.delete()
        except Exception:
            self.resource.logger.error('could not delete %s', self.archive)
        return super().delete(*args, **kwargs)
|
|
|
|
    @property
    def message_xml(self):
        """Parse and return the archive's message.xml as an element tree node."""
        # FileField can be closed, or open, you never know, and used as a
        # contextmanager, __enter__ does not re-open/re-seek(0) it :/
        self.archive.open()

        with self.archive as fd:
            with zipfile.ZipFile(fd) as archive:
                with archive.open('message.xml') as message_xml_fd:
                    s = message_xml_fd.read()
                    return ET.fromstring(s)
|
|
|
|
    @property
    def id_enveloppe(self):
        """Envelope identifier extracted from the incoming message.xml."""
        message_xml = self.message_xml
        ns = {
            'pec': 'http://finances.gouv.fr/dgme/pec/message/v1',
            'mdel': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier',
        }
        # the MessageId text is split on whitespace and the second token is
        # used as the envelope id (assumption from observed payloads — TODO
        # confirm the MessageId format)
        return message_xml.find('.//{%(pec)s}MessageId' % ns).text.split()[1]
|
|
|
|
def build_message_xml_retour(self, etat, commentaire):
|
|
message_xml = self.message_xml
|
|
|
|
ns = {
|
|
'pec': 'http://finances.gouv.fr/dgme/pec/message/v1',
|
|
'mdel': 'http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier',
|
|
}
|
|
|
|
template = '''<ns2:Message xmlns:ns2="http://finances.gouv.fr/dgme/pec/message/v1" xmlns="http://finances.gouv.fr/dgme/gf/composants/teledemarchexml/donnee/metier">
|
|
<ns2:Header>
|
|
<ns2:Routing>
|
|
<ns2:MessageId/>
|
|
<ns2:RefToMessageId/>
|
|
<ns2:FlowType/>
|
|
<ns2:Sender/>
|
|
<ns2:Recipients>
|
|
<ns2:Recipient/>
|
|
</ns2:Recipients>
|
|
</ns2:Routing>
|
|
<ns2:Security>
|
|
<ns2:Horodatage>false</ns2:Horodatage>
|
|
</ns2:Security>
|
|
</ns2:Header>
|
|
<ns2:Body>
|
|
<ns2:Content><ns2:Retour>
|
|
<ns2:Enveloppe>
|
|
<ns2:NumeroTeledemarche/>
|
|
<ns2:MotDePasse/>
|
|
</ns2:Enveloppe>
|
|
<ns2:Instruction>
|
|
<ns2:Maj>
|
|
<ns2:Etat/>
|
|
<ns2:Commentaire/>
|
|
</ns2:Maj>
|
|
</ns2:Instruction>
|
|
</ns2:Retour>
|
|
</ns2:Content>
|
|
</ns2:Body>
|
|
</ns2:Message>''' # NOQA E501
|
|
|
|
response = ET.XML(template)
|
|
|
|
message_id = message_xml.find('.//{%(pec)s}MessageId' % ns).text
|
|
# maybe could work with str(uuid.uuid4().hex), which would be more unique, we will never know
|
|
response.find('.//{%(pec)s}MessageId' % ns).text = 'RET-1-' + message_id
|
|
response.find('.//{%(pec)s}RefToMessageId' % ns).text = message_id
|
|
response.find('.//{%(pec)s}FlowType' % ns).text = message_xml.find('.//{%(pec)s}FlowType' % ns).text
|
|
response.find('.//{%(pec)s}Sender' % ns).extend(message_xml.find('.//{%(pec)s}Recipient' % ns))
|
|
response.find('.//{%(pec)s}Recipient' % ns).extend(message_xml.find('.//{%(pec)s}Sender' % ns))
|
|
|
|
response.find('.//{%(pec)s}FlowType' % ns).text = message_xml.find('.//{%(pec)s}FlowType' % ns).text
|
|
|
|
# Strangely the same node in the response does not have the same
|
|
# namespace as the node in the request, whatever...
|
|
response.find('.//{%(pec)s}NumeroTeledemarche' % ns).text = message_xml.find(
|
|
'.//{%(mdel)s}NumeroTeledemarche' % ns
|
|
).text
|
|
response.find('.//{%(pec)s}MotDePasse' % ns).text = message_xml.find(
|
|
'.//{%(mdel)s}MotDePasse' % ns
|
|
).text
|
|
response.find('.//{%(pec)s}Etat' % ns).text = '100'
|
|
response.find('.//{%(pec)s}Commentaire' % ns).text = 'Dossier transmis à la collectivité'
|
|
return response
|
|
|
|
def build_response_zip(self, fd_or_filename, etat, commentaire):
|
|
with zipfile.ZipFile(fd_or_filename, 'w') as archive:
|
|
message_xml = self.build_message_xml_retour(etat=etat, commentaire=commentaire)
|
|
archive.writestr(
|
|
'message.xml',
|
|
'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>'
|
|
+ ET.tostring(message_xml, encoding='utf-8').decode(),
|
|
)
|
|
|
|
@property
|
|
def response_zip_filename(self):
|
|
m = FILE_PATTERN.match(self.filename)
|
|
|
|
numero_teledossier = m.group('identifier')
|
|
code_demarche = m.group('procedure')
|
|
id_enveloppe = self.id_enveloppe
|
|
numero_sequence = '1'
|
|
|
|
return '%s-%s-%s-%s.zip' % (numero_teledossier, code_demarche, id_enveloppe, numero_sequence)
|
|
|
|
    class Meta:
        verbose_name = _('MDEL request')
        verbose_name_plural = _('MDEL requests')
        # one Request row per (resource, incoming file) pair
        unique_together = (('resource', 'filename'),)
|